diff --git a/doc/_images/xrd1d_background_1.png b/doc/_images/xrd1d_background_1.png new file mode 100755 index 000000000..c4d9af43d Binary files /dev/null and b/doc/_images/xrd1d_background_1.png differ diff --git a/doc/_images/xrd1d_cif_search.png b/doc/_images/xrd1d_cif_search.png new file mode 100755 index 000000000..661a463d8 Binary files /dev/null and b/doc/_images/xrd1d_cif_search.png differ diff --git a/doc/_images/xrd1d_data_with_cif.png b/doc/_images/xrd1d_data_with_cif.png new file mode 100755 index 000000000..11479cc3e Binary files /dev/null and b/doc/_images/xrd1d_data_with_cif.png differ diff --git a/doc/_images/xrd1d_main_with_tiff.png b/doc/_images/xrd1d_main_with_tiff.png new file mode 100755 index 000000000..ba1a84a9c Binary files /dev/null and b/doc/_images/xrd1d_main_with_tiff.png differ diff --git a/doc/_images/xrd1d_tiff_import.png b/doc/_images/xrd1d_tiff_import.png new file mode 100755 index 000000000..bd2c940db Binary files /dev/null and b/doc/_images/xrd1d_tiff_import.png differ diff --git a/doc/_images/xrd1d_tiff_integrated.png b/doc/_images/xrd1d_tiff_integrated.png new file mode 100755 index 000000000..35728a5ed Binary files /dev/null and b/doc/_images/xrd1d_tiff_integrated.png differ diff --git a/doc/_images/xrd1d_withbkg_d.png b/doc/_images/xrd1d_withbkg_d.png new file mode 100755 index 000000000..21be456ed Binary files /dev/null and b/doc/_images/xrd1d_withbkg_d.png differ diff --git a/doc/_images/xrd1d_withbkg_q.png b/doc/_images/xrd1d_withbkg_q.png new file mode 100755 index 000000000..f02e42332 Binary files /dev/null and b/doc/_images/xrd1d_withbkg_q.png differ diff --git a/doc/_images/xrd1d_withbkg_tth.png b/doc/_images/xrd1d_withbkg_tth.png new file mode 100755 index 000000000..1fd6dea98 Binary files /dev/null and b/doc/_images/xrd1d_withbkg_tth.png differ diff --git a/doc/index.rst b/doc/index.rst index 659c78d2e..96deb94b2 100644 --- a/doc/index.rst +++ b/doc/index.rst @@ -108,6 +108,7 @@ Table of Contents guis.rst 
xasviewer.rst wxmap_viewer.rst + xrd1d_viewer.rst qtrixs.rst python.rst larchlang.rst diff --git a/doc/wxmap_viewer.rst b/doc/wxmap_viewer.rst index 4e4f47f81..12711ec34 100644 --- a/doc/wxmap_viewer.rst +++ b/doc/wxmap_viewer.rst @@ -1,20 +1,18 @@ .. _mapviewer_app: -========================== -GSECARS Mapviewr +XRFMap Viewer ========================== .. _wxmplot: https://newville.github.io/wxmplot .. _matplotlib: https://matplotlib.org/ -The GSECARS Mapviewer is one of the main GUI applications of Larch, +The GSECARS XRFMap Viewer is one of the main GUI applications of Larch, allowing users to read and display HDF5 files containing X-ray fluorescence -maps from synchrotron X-ray microprobes. The program plays the dual role -of displaying existing X-ray microprobe maps in HDF5 format and assembling -those HDF5 X-ray microprobe map files from the raw data as collected APS -beamline 13-ID-E or other microprobes using the same Epics scanning -software used there. +maps from synchrotron X-ray microprobes. The program plays the dual role of +displaying existing X-ray microprobe maps in HDF5 format and assembling those +HDF5 X-ray microprobe map files from the raw data as collected APS beamline +13-ID-E or other microprobes using the same Epics scanning software used there. diff --git a/doc/xasviewer.rst b/doc/xasviewer.rst index cb624d8cb..6eda0d931 100644 --- a/doc/xasviewer.rst +++ b/doc/xasviewer.rst @@ -6,17 +6,17 @@ Larix (was XAS Viewer) =========================== -The Larix Application gives a graphical user interface (GUI) for the +The `Larix` Application gives a graphical user interface (GUI) for the visualization and analyis of X-ray absorption spectroscopy (XAS) data, both -XANES and EXAFS. It is deliberately patterned after the Demeter Package -(Athena and Artemis programs), and shares many concepts and presentation ideas -with these progrqms. As a GUI Program, Larix should seem very "Athena-like" -though of course there will be several differences. 
We hope that many of -these differences will be "improvements". By using Larch for all of its -analysis steps, Larix not only provides interactive data visualization, -exploration, and analysis of XAS data, but also records those steps as Larch / -Python commands that can be saved and reproduced outside of the GUI, say to -enable batch processing of large volumes of data. +XANES and EXAFS. It is deliberately patterned after the Demeter Package (Athena +and Artemis programs), and shares many concepts and presentation ideas with +these programs. As a GUI Program, Larix should seem very "Athena-like" though +of course there will be several differences. We hope that many of these +differences will be "improvements". By using Larch for all of its analysis +steps, Larix not only provides interactive data visualization, exploration, and +analysis of XAS data, but also records those steps as Larch / Python commands +that can be saved and reproduced outside of the GUI, say to enable batch +processing of large volumes of data. Larix is still in active development with new features driving much of the development and releases of Larch version for the past few years. At this diff --git a/doc/xrd1d_viewer.rst b/doc/xrd1d_viewer.rst new file mode 100644 index 000000000..67bf8c671 --- /dev/null +++ b/doc/xrd1d_viewer.rst @@ -0,0 +1,21 @@ +.. _larchxrd1d-chapter: + +.. _larchxrd1d_app: + +Larch XRD1D Viewer +========================== + + +The `Larch XRD1D` application allows simple viewing and manipulation of +1-dimensional XRD patterns of diffracted intensity as a function of scattering +wavenumber or angle: :math:`I(q)`, :math:`I(2\theta)`. The application allows + + 1. reading and comparison of multiple XRD patterns. + 2. fitting and subtracting a background from an XRD pattern. + + + + +.. 
image:: _images/xrd1d_main_with_tiff.png + :target: _images/xrd1d_main_with_tiff.png + :width: 70% diff --git a/examples/feffit/doc_feffit1.out b/examples/feffit/doc_feffit1.out index 7c671c433..9cee19be9 100644 --- a/examples/feffit/doc_feffit1.out +++ b/examples/feffit/doc_feffit1.out @@ -1,41 +1,41 @@ =================== FEFFIT RESULTS ==================== [[Statistics]] - n_function_calls = 36 - n_variables = 4 - n_data_points = 104 - n_independent = 15.2602829 - chi_square = 112.916714 - reduced chi_square = 10.0278754 - r-factor = 0.00272181 - Akaike info crit = 38.5418835 - Bayesian info crit = 41.4428977 - -[[Dataset]] - unique_id = 'dc5vezx7' - fit space = 'r' - r-range = 1.400, 3.000 - k-range = 3.000, 17.000 - k window, dk = 'kaiser', 4.000 - paths used in fit = ['feffcu01.dat'] - k-weight = 2 - epsilon_k = Array(mean=6.1910e-4, std=0.0016545) - epsilon_r = 0.0208085 - n_independent = 15.260 + n_function_calls = 36 + n_variables = 4 + n_data_points = 104 + n_independent = 15.2602829 + chi_square = 112.916714 + reduced chi_square = 10.0278754 + r-factor = 0.00272181 + Akaike info crit = 38.5418835 + Bayesian info crit = 41.4428977 [[Variables]] - amp = 0.9315590 +/- 0.0391633 (init= 1.0000000) - del_e0 = 4.3577673 +/- 0.5113492 (init= 0.0000000) - del_r = -0.0060167 +/- 0.0026133 (init= 0.0000000) - sig2 = 0.0086697 +/- 3.0785e-4 (init= 0.0000000) + amp = 0.9315590 +/- 0.0391633 (init= 1.0000000) + del_e0 = 4.3577673 +/- 0.5113492 (init= 0.0000000) + del_r = -0.0060167 +/- 0.0026133 (init= 0.0000000) + sig2 = 0.0086697 +/- 3.0785e-4 (init= 0.0000000) -[[Correlations]] (unreported correlations are < 0.100) - amp, sig2 = 0.928 - del_e0, del_r = 0.920 - del_r, sig2 = 0.159 - amp, del_r = 0.137 +[[Correlations]] (unreported correlations are < 0.100) + amp, sig2 = +0.928 + del_e0, del_r = +0.920 + del_r, sig2 = +0.159 + amp, del_r = +0.137 + +[[Dataset]] + unique_id = 'dc5vezx7' + fit space = 'r' + r-range = 1.400, 3.000 + k-range = 3.000, 17.000 + k window, 
dk = 'kaiser', 4.000 + paths used in fit = ['feffcu01.dat'] + k-weight = 2 + epsilon_k = Array(mean=6.1910e-4, std=0.0016545) + epsilon_r = 0.0208085 + n_independent = 15.260 [[Paths]] - = Path 'p3tf6mewgr' = Cu K Edge + = Path 'p3tf6mewg' = Cu K Edge feffdat file = feffcu01.dat, from feff run '' geometry atom x y z ipot Cu 0.0000, 0.0000, 0.0000 0 (absorber) diff --git a/examples/feffit/doc_feffit2.out b/examples/feffit/doc_feffit2.out index 08e44ba9c..57efb40e4 100644 --- a/examples/feffit/doc_feffit2.out +++ b/examples/feffit/doc_feffit2.out @@ -1,58 +1,58 @@ =================== FEFFIT RESULTS ==================== [[Statistics]] - n_function_calls = 107 - n_variables = 7 - n_data_points = 138 - n_independent = 19.7166213 - chi_square = 12.9083391 - reduced chi_square = 1.01507616 - r-factor = 0.00251319 - Akaike info crit = 5.64826681 - Bayesian info crit = 12.5185008 - -[[Dataset]] - unique_id = 'ds3qtgjs' - fit space = 'r' - r-range = 1.400, 3.500 - k-range = 3.000, 17.000 - k window, dk = 'kaiser', 4.000 - paths used in fit = ['feff0001.dat', 'feff0002.dat', 'feff0003.dat'] - k-weight = 2 - epsilon_k = Array(mean=0.0017550, std=0.0058069) - epsilon_r = 0.0589866 - n_independent = 19.717 + n_function_calls = 107 + n_variables = 7 + n_data_points = 138 + n_independent = 19.7166213 + chi_square = 12.9083391 + reduced chi_square = 1.01507616 + r-factor = 0.00251319 + Akaike info crit = 5.64826681 + Bayesian info crit = 12.5185008 [[Variables]] - alpha = 0.0028114 +/- 0.0029801 (init= 0.0000000) - amp = 0.9362805 +/- 0.0362098 (init= 1.0000000) - c3_1 = 1.4440e-4 +/- 8.2197e-5 (init= 0.0020000) - del_e0 = 5.7042623 +/- 0.8197285 (init= 3.0000000) - sig2_1 = 0.0086805 +/- 2.8390e-4 (init= 0.0020000) - sig2_2 = 0.0131046 +/- 0.0011358 (init= 0.0020000) - sig2_3 = 0.0063292 +/- 0.0030901 (init= 0.0020000) + alpha = 0.0028114 +/- 0.0029801 (init= 0.0000000) + amp = 0.9362805 +/- 0.0362098 (init= 1.0000000) + c3_1 = 1.4440e-4 +/- 8.2197e-5 (init= 0.0020000) + del_e0 = 
5.7042623 +/- 0.8197285 (init= 3.0000000) + sig2_1 = 0.0086805 +/- 2.8390e-4 (init= 0.0020000) + sig2_2 = 0.0131046 +/- 0.0011358 (init= 0.0020000) + sig2_3 = 0.0063292 +/- 0.0030901 (init= 0.0020000) -[[Correlations]] (unreported correlations are < 0.100) - alpha, c3_1 = 0.952 - alpha, del_e0 = 0.950 - amp, sig2_1 = 0.929 - c3_1, del_e0 = 0.837 - amp, sig2_2 = 0.298 - sig2_1, sig2_2 = 0.276 - sig2_1, sig2_3 = 0.213 - amp, sig2_3 = 0.213 - c3_1, sig2_3 = -0.189 - alpha, amp = 0.185 - alpha, sig2_1 = 0.172 - c3_1, sig2_2 = 0.168 - alpha, sig2_2 = 0.152 - alpha, sig2_3 = -0.148 - amp, del_e0 = 0.148 - amp, c3_1 = 0.145 - del_e0, sig2_1 = 0.143 - c3_1, sig2_1 = 0.124 +[[Correlations]] (unreported correlations are < 0.100) + alpha, c3_1 = 0.952 + alpha, del_e0 = 0.950 + amp, sig2_1 = 0.929 + c3_1, del_e0 = 0.837 + amp, sig2_2 = 0.298 + sig2_1, sig2_2 = 0.276 + sig2_1, sig2_3 = 0.213 + amp, sig2_3 = 0.213 + c3_1, sig2_3 = -0.189 + alpha, amp = 0.185 + alpha, sig2_1 = 0.172 + c3_1, sig2_2 = 0.168 + alpha, sig2_2 = 0.152 + alpha, sig2_3 = -0.148 + amp, del_e0 = 0.148 + amp, c3_1 = 0.145 + del_e0, sig2_1 = 0.143 + c3_1, sig2_1 = 0.124 + +[[Dataset]] + unique_id = 'ds3qtgjs' + fit space = 'r' + r-range = 1.400, 3.500 + k-range = 3.000, 17.000 + k window, dk = 'kaiser', 4.000 + paths used in fit = ['feff0001.dat', 'feff0002.dat', 'feff0003.dat'] + k-weight = 2 + epsilon_k = Array(mean=0.0017550, std=0.0058069) + epsilon_r = 0.0589866 + n_independent = 19.717 [[Paths]] - = Path 'p3tf6mewgr' = Cu K Edge + = Path 'p3tf6mewg' = Cu K Edge feffdat file = feff0001.dat, from feff run '' geometry atom x y z ipot Cu 0.0000, 0.0000, 0.0000 0 (absorber) @@ -66,7 +66,7 @@ sigma2 = 0.0086805 +/- 2.8390e-4 := 'sig2_1' third = 1.4440e-4 +/- 8.2197e-5 := 'c3_1' - = Path 'pj3m4qvfyl' = Cu K Edge + = Path 'pj3m4qvfy' = Cu K Edge feffdat file = feff0002.dat, from feff run '' geometry atom x y z ipot Cu 0.0000, 0.0000, 0.0000 0 (absorber) @@ -79,7 +79,7 @@ deltar = 0.0101300 +/- 0.0107377 := 
'alpha*reff' sigma2 = 0.0131046 +/- 0.0011358 := 'sig2_2' - = Path 'pibakl7xet' = Cu K Edge + = Path 'pibakl7xe' = Cu K Edge feffdat file = feff0003.dat, from feff run '' geometry atom x y z ipot Cu 0.0000, 0.0000, 0.0000 0 (absorber) diff --git a/examples/feffit/doc_feffit3.out b/examples/feffit/doc_feffit3.out index 43e6b52c9..1d7011c4a 100644 --- a/examples/feffit/doc_feffit3.out +++ b/examples/feffit/doc_feffit3.out @@ -1,63 +1,42 @@ =================== FEFFIT RESULTS ==================== [[Statistics]] - n_function_calls = 25 - n_variables = 5 - n_data_points = 390 - n_independent = 56.4760609 - chi_square = 2971.58732 - reduced chi_square = 57.7275586 - r-factor = 0.01637720 - Akaike info crit = 233.816589 - Bayesian info crit = 243.985673 - -[[Datasets (3)]] - unique_id = 'd3hdjp7v' - fit space = 'r' - r-range = 1.400, 3.400 - k-range = 3.000, 17.000 - k window, dk = 'kaiser', 4.000 - paths used in fit = ['feff0001.dat'] - k-weight = 2 - epsilon_k = Array(mean=0.0011450, std=9.5319e-4) - epsilon_r = 0.0384855 - n_independent = 18.825 - unique_id = 'd3s3lmwo' - fit space = 'r' - r-range = 1.400, 3.400 - k-range = 3.000, 17.000 - k window, dk = 'kaiser', 4.000 - paths used in fit = ['feff0001.dat'] - k-weight = 2 - epsilon_k = Array(mean=0.0010796, std=0.0010404) - epsilon_r = 0.0362874 - n_independent = 18.825 - unique_id = 'dcultf7u' - fit space = 'r' - r-range = 1.400, 3.400 - k-range = 3.000, 17.000 - k window, dk = 'kaiser', 4.000 - paths used in fit = ['feff0001.dat'] - k-weight = 2 - epsilon_k = Array(mean=7.3213e-4, std=0.0013137) - epsilon_r = 0.0246074 - n_independent = 18.825 + n_function_calls = 25 + n_variables = 5 + n_data_points = 390 + n_independent = 56.4760609 + chi_square = 2971.58732 + reduced chi_square = 57.7275586 + r-factor = 0.01637720 + Akaike info crit = 233.816589 + Bayesian info crit = 243.985673 [[Variables]] - alpha = 5.2689e-6 +/- 6.6632e-6 (init= 0.0000000) - amp = 0.8887003 +/- 0.0307540 (init= 1.0000000) - del_e0 = 5.3725821 
+/- 0.6137863 (init= 2.0000000) - dr_off = -3.0870e-4 +/- 0.0027204 (init= 0.0000000) - theta = 233.13561 +/- 8.0815181 (init= 250.00000) + alpha = 5.2689e-6 +/- 6.6632e-6 (init= 0.0000000) + amp = 0.8887003 +/- 0.0307540 (init= 1.0000000) + del_e0 = 5.3725821 +/- 0.6137863 (init= 2.0000000) + dr_off = -3.0870e-4 +/- 0.0027204 (init= 0.0000000) + theta = 233.13561 +/- 8.0815181 (init= 250.00000) + +[[Correlations]] (unreported correlations are < 0.100) + amp, theta = -0.854 + del_e0, dr_off = 0.799 + alpha, dr_off = -0.380 + alpha, del_e0 = 0.106 -[[Correlations]] (unreported correlations are < 0.100) - amp, theta = -0.854 - del_e0, dr_off = 0.799 - alpha, dr_off = -0.380 - alpha, del_e0 = 0.106 +[[Dataset 1 of 3]] + unique_id = 'd3hdjp7v' + fit space = 'r' + r-range = 1.400, 3.400 + k-range = 3.000, 17.000 + k window, dk = 'kaiser', 4.000 + paths used in fit = ['feff0001.dat'] + k-weight = 2 + epsilon_k = Array(mean=0.0011450, std=9.5319e-4) + epsilon_r = 0.0384855 + n_independent = 18.825 [[Paths]] - dataset 1: - = Path 'p3tf6mewgr' = Cu K Edge + = Path 'p3tf6mewg' = Cu K Edge feffdat file = feff0001.dat, from feff run '' geometry atom x y z ipot Cu 0.0000, 0.0000, 0.0000 0 (absorber) @@ -66,12 +45,24 @@ degen = 12.000000 n*s02 = 0.8887003 +/- 0.0307540 := 'amp' e0 = 5.3725821 +/- 0.6137863 := 'del_e0' - r = 2.5495049 +/- 0.0029349 := 'reff + dr_off + 10*alpha*reff' - deltar = 0.0017049 +/- 0.0029349 := 'dr_off + 10*alpha*reff' - sigma2 = 0.0050293 +/- 2.9423e-4 := 'sigma2_eins(10, theta)' + r = 2.5476255 +/- 0.0026605 := 'reff + dr_off + 10*alpha*reff' + deltar = -1.7446e-4 +/- 0.0026605 := 'dr_off + 10*alpha*reff' + sigma2 = 0.0032743 +/- 1.1350e-4 := 'sigma2_eins(10, theta)' - dataset 2: - = Path 'p3tf6mewgr' = Cu K Edge +[[Dataset 2 of 3]] + unique_id = 'd3s3lmwo' + fit space = 'r' + r-range = 1.400, 3.400 + k-range = 3.000, 17.000 + k window, dk = 'kaiser', 4.000 + paths used in fit = ['feff0001.dat'] + k-weight = 2 + epsilon_k = Array(mean=0.0010796, 
std=0.0010404) + epsilon_r = 0.0362874 + n_independent = 18.825 + +[[Paths]] + = Path 'p3tf6mewg' = Cu K Edge feffdat file = feff0001.dat, from feff run '' geometry atom x y z ipot Cu 0.0000, 0.0000, 0.0000 0 (absorber) @@ -80,12 +71,24 @@ degen = 12.000000 n*s02 = 0.8887003 +/- 0.0307540 := 'amp' e0 = 5.3725821 +/- 0.6137863 := 'del_e0' - r = 2.5495049 +/- 0.0029349 := 'reff + dr_off + 50*alpha*reff' - deltar = 0.0017049 +/- 0.0029349 := 'dr_off + 50*alpha*reff' - sigma2 = 0.0050293 +/- 2.9423e-4 := 'sigma2_eins(50, theta)' + r = 2.5481625 +/- 0.0025227 := 'reff + dr_off + 50*alpha*reff' + deltar = 3.6251e-4 +/- 0.0025227 := 'dr_off + 50*alpha*reff' + sigma2 = 0.0033367 +/- 1.2585e-4 := 'sigma2_eins(50, theta)' - dataset 3: - = Path 'p3tf6mewgr' = Cu K Edge +[[Dataset 3 of 3]] + unique_id = 'dcultf7u' + fit space = 'r' + r-range = 1.400, 3.400 + k-range = 3.000, 17.000 + k window, dk = 'kaiser', 4.000 + paths used in fit = ['feff0001.dat'] + k-weight = 2 + epsilon_k = Array(mean=7.3213e-4, std=0.0013137) + epsilon_r = 0.0246074 + n_independent = 18.825 + +[[Paths]] + = Path 'p3tf6mewg' = Cu K Edge feffdat file = feff0001.dat, from feff run '' geometry atom x y z ipot Cu 0.0000, 0.0000, 0.0000 0 (absorber) diff --git a/examples/feffit/doc_feffit4.out b/examples/feffit/doc_feffit4.out index 7641467d6..e804c9a67 100644 --- a/examples/feffit/doc_feffit4.out +++ b/examples/feffit/doc_feffit4.out @@ -1,54 +1,54 @@ =================== FEFFIT RESULTS ==================== [[Statistics]] - n_function_calls = 65 - n_variables = 7 - n_data_points = 144 - n_independent = 17.1064802 - chi_square = 65.0941583 - reduced chi_square = 6.44083368 - r-factor = 0.01751179 - Akaike info crit = 36.8607146 - Bayesian info crit = 42.7369161 - -[[Dataset]] - dataset unique_id = 'dp3elp4f' - fit space = 'r' - r-range = 1.000, 3.200 - k-range = 2.000, 13.500 - k window, dk = 'kaiser', 3.000 - paths used in fit = ['feff_feo01.dat', 'feff_feo02.dat'] - k-weight = 3 - epsilon_k = 
Array(mean=7.7091e-4, std=0.0018654) - epsilon_r = 0.1661505 - n_independent = 17.106 + n_function_calls = 65 + n_variables = 7 + n_data_points = 144 + n_independent = 17.1064802 + chi_square = 65.0941583 + reduced chi_square = 6.44083368 + r-factor = 0.01751179 + Akaike info crit = 36.8607146 + Bayesian info crit = 42.7369161 [[Variables]] - de0 = -1.4741656 +/- 1.1341834 (init= 0.1000000) - delr_1 = -0.0293664 +/- 0.0107585 (init= 0.0000000) - delr_2 = 0.0478234 +/- 0.0083873 (init= 0.0000000) - n1 = 5.5325714 +/- 1.2512685 (init= 6.0000000) - n2 = 11.419642 +/- 1.5349979 (init= 12.000000) - sig2_1 = 0.0120204 +/- 0.0027069 (init= 0.0020000) - sig2_2 = 0.0125357 +/- 0.0011737 (init= 0.0020000) + de0 = -1.4741656 +/- 1.1341834 (init= 0.1000000) + delr_1 = -0.0293664 +/- 0.0107585 (init= 0.0000000) + delr_2 = 0.0478234 +/- 0.0083873 (init= 0.0000000) + n1 = 5.5325714 +/- 1.2512685 (init= 6.0000000) + n2 = 11.419642 +/- 1.5349979 (init= 12.000000) + sig2_1 = 0.0120204 +/- 0.0027069 (init= 0.0020000) + sig2_2 = 0.0125357 +/- 0.0011737 (init= 0.0020000) -[[Correlations]] (unreported correlations are < 0.100) - n2, sig2_2 = 0.939 - de0, delr_2 = 0.919 - n1, sig2_1 = 0.897 - de0, delr_1 = 0.582 - delr_1, delr_2 = 0.539 - delr_1, sig2_1 = 0.220 - delr_1, n1 = 0.190 - delr_2, sig2_2 = 0.184 - de0, n1 = -0.178 - delr_2, n2 = 0.159 - delr_2, n1 = -0.156 - de0, sig2_1 = -0.125 - delr_2, sig2_1 = -0.107 - n1, n2 = 0.106 +[[Correlations]] (unreported correlations are < 0.100) + n2, sig2_2 = 0.939 + de0, delr_2 = 0.919 + n1, sig2_1 = 0.897 + de0, delr_1 = 0.582 + delr_1, delr_2 = 0.539 + delr_1, sig2_1 = 0.220 + delr_1, n1 = 0.190 + delr_2, sig2_2 = 0.184 + de0, n1 = -0.178 + delr_2, n2 = 0.159 + delr_2, n1 = -0.156 + de0, sig2_1 = -0.125 + delr_2, sig2_1 = -0.107 + n1, n2 = 0.106 + +[[Dataset]] + unique_id = 'dp3elp4f' + fit space = 'r' + r-range = 1.000, 3.200 + k-range = 2.000, 13.500 + k window, dk = 'kaiser', 3.000 + paths used in fit = ['feff_feo01.dat', 'feff_feo02.dat'] 
+ k-weight = 3 + epsilon_k = Array(mean=7.7091e-4, std=0.0018654) + epsilon_r = 0.1661505 + n_independent = 17.106 [[Paths]] - = Path 'pjuumtmhel' = Fe K Edge + = Path 'pjuumtmhe' = Fe K Edge feffdat file = feff_feo01.dat, from feff run '' geometry atom x y z ipot Fe 0.0000, 0.0000, 0.0000 0 (absorber) @@ -61,7 +61,7 @@ deltar = -0.0293664 +/- 0.0107585 := 'delr_1' sigma2 = 0.0120204 +/- 0.0027069 := 'sig2_1' - = Path 'p7etnrdqri' = Fe K Edge + = Path 'p7etnrdqr' = Fe K Edge feffdat file = feff_feo02.dat, from feff run '' geometry atom x y z ipot Fe 0.0000, 0.0000, 0.0000 0 (absorber) diff --git a/examples/feffit/doc_feffit5_k.out b/examples/feffit/doc_feffit5_k.out index fcf5a1227..3fc1990f4 100644 --- a/examples/feffit/doc_feffit5_k.out +++ b/examples/feffit/doc_feffit5_k.out @@ -1,40 +1,40 @@ =================== FEFFIT RESULTS ==================== [[Statistics]] - n_function_calls = 49 - n_variables = 7 - n_data_points = 230 - n_independent = 17.1064802 - chi_square = 7475630.83 - reduced chi_square = 739686.879 - r-factor = 0.14252235 - Akaike info crit = 236.173863 - Bayesian info crit = 242.050064 - -[[Dataset]] - dataset unique_id = 'dp3elp4f' - fit space = 'k' - r-range = 1.000, 3.200 - k-range = 2.000, 13.500 - k window, dk = 'kaiser', 3.000 - paths used in fit = ['feff_feo01.dat', 'feff_feo02.dat'] - k-weight = 2 - epsilon_k = Array(mean=8.0600e-4, std=0.0018556) - epsilon_r = 0.0152248 - n_independent = 17.106 + n_function_calls = 49 + n_variables = 7 + n_data_points = 230 + n_independent = 17.1064802 + chi_square = 7475630.83 + reduced chi_square = 739686.879 + r-factor = 0.14252235 + Akaike info crit = 236.173863 + Bayesian info crit = 242.050064 [[Variables]] - de0 = -1.8793985 +/- 2.5285324 (init= 0.1000000) - delr_1 = -0.0289654 +/- 0.0316899 (init= 0.0000000) - delr_2 = 0.0447253 +/- 0.0247363 (init= 0.0000000) - n1 = 5.0954909 +/- 2.4602584 (init= 6.0000000) - n2 = 12.796196 +/- 5.1180205 (init= 12.000000) - sig2_1 = 0.0104509 +/- 0.0077429 (init= 
0.0020000) - sig2_2 = 0.0135419 +/- 0.0042588 (init= 0.0020000) + de0 = -1.8793985 +/- 2.5285324 (init= 0.1000000) + delr_1 = -0.0289654 +/- 0.0316899 (init= 0.0000000) + delr_2 = 0.0447253 +/- 0.0247363 (init= 0.0000000) + n1 = 5.0954909 +/- 2.4602584 (init= 6.0000000) + n2 = 12.796196 +/- 5.1180205 (init= 12.000000) + sig2_1 = 0.0104509 +/- 0.0077429 (init= 0.0020000) + sig2_2 = 0.0135419 +/- 0.0042588 (init= 0.0020000) -[[Correlations]] (unreported correlations are < 0.500) - n2, sig2_2 = 0.924 - n1, sig2_1 = 0.881 - de0, delr_2 = 0.870 - de0, delr_1 = 0.671 - delr_1, delr_2 = 0.591 +[[Correlations]] (unreported correlations are < 0.500) + n2, sig2_2 = 0.924 + n1, sig2_1 = 0.881 + de0, delr_2 = 0.870 + de0, delr_1 = 0.671 + delr_1, delr_2 = 0.591 + +[[Dataset]] + unique_id = 'dp3elp4f' + fit space = 'k' + r-range = 1.000, 3.200 + k-range = 2.000, 13.500 + k window, dk = 'kaiser', 3.000 + paths used in fit = ['feff_feo01.dat', 'feff_feo02.dat'] + k-weight = 2 + epsilon_k = Array(mean=8.0600e-4, std=0.0018556) + epsilon_r = 0.0152248 + n_independent = 17.106 ======================================================= diff --git a/examples/feffit/doc_feffit5_q.out b/examples/feffit/doc_feffit5_q.out index 318903d51..12c86ad27 100644 --- a/examples/feffit/doc_feffit5_q.out +++ b/examples/feffit/doc_feffit5_q.out @@ -1,40 +1,40 @@ =================== FEFFIT RESULTS ==================== [[Statistics]] - n_function_calls = 65 - n_variables = 7 - n_data_points = 230 - n_independent = 17.1064802 - chi_square = 156.610624 - reduced chi_square = 15.4960600 - r-factor = 0.01377419 - Akaike info crit = 51.8789694 - Bayesian info crit = 57.7551708 - -[[Dataset]] - dataset unique_id = 'dp3elp4f' - fit space = 'q' - r-range = 1.000, 3.200 - k-range = 2.000, 13.500 - k window, dk = 'kaiser', 3.000 - paths used in fit = ['feff_feo01.dat', 'feff_feo02.dat'] - k-weight = 3 - epsilon_k = Array(mean=7.7091e-4, std=0.0018654) - epsilon_r = 0.1661505 - n_independent = 17.106 + 
n_function_calls = 65 + n_variables = 7 + n_data_points = 230 + n_independent = 17.1064802 + chi_square = 156.610624 + reduced chi_square = 15.4960600 + r-factor = 0.01377419 + Akaike info crit = 51.8789694 + Bayesian info crit = 57.7551708 [[Variables]] - de0 = -1.4418400 +/- 1.0075979 (init= 0.1000000) - delr_1 = -0.0291715 +/- 0.0095887 (init= 0.0000000) - delr_2 = 0.0481107 +/- 0.0074656 (init= 0.0000000) - n1 = 5.7051162 +/- 1.1572056 (init= 6.0000000) - n2 = 11.473040 +/- 1.3721353 (init= 12.000000) - sig2_1 = 0.0123871 +/- 0.0024544 (init= 0.0020000) - sig2_2 = 0.0125804 +/- 0.0010458 (init= 0.0020000) + de0 = -1.4418400 +/- 1.0075979 (init= 0.1000000) + delr_1 = -0.0291715 +/- 0.0095887 (init= 0.0000000) + delr_2 = 0.0481107 +/- 0.0074656 (init= 0.0000000) + n1 = 5.7051162 +/- 1.1572056 (init= 6.0000000) + n2 = 11.473040 +/- 1.3721353 (init= 12.000000) + sig2_1 = 0.0123871 +/- 0.0024544 (init= 0.0020000) + sig2_2 = 0.0125804 +/- 0.0010458 (init= 0.0020000) -[[Correlations]] (unreported correlations are < 0.500) - n2, sig2_2 = 0.940 - de0, delr_2 = 0.919 - n1, sig2_1 = 0.900 - de0, delr_1 = 0.585 - delr_1, delr_2 = 0.542 +[[Correlations]] (unreported correlations are < 0.500) + n2, sig2_2 = 0.940 + de0, delr_2 = 0.919 + n1, sig2_1 = 0.900 + de0, delr_1 = 0.585 + delr_1, delr_2 = 0.542 + +[[Dataset]] + unique_id = 'dp3elp4f' + fit space = 'q' + r-range = 1.000, 3.200 + k-range = 2.000, 13.500 + k window, dk = 'kaiser', 3.000 + paths used in fit = ['feff_feo01.dat', 'feff_feo02.dat'] + k-weight = 3 + epsilon_k = Array(mean=7.7091e-4, std=0.0018654) + epsilon_r = 0.1661505 + n_independent = 17.106 ======================================================= diff --git a/examples/feffit/doc_feffit5_r.out b/examples/feffit/doc_feffit5_r.out index ad5d0c247..f91047af2 100644 --- a/examples/feffit/doc_feffit5_r.out +++ b/examples/feffit/doc_feffit5_r.out @@ -1,40 +1,40 @@ =================== FEFFIT RESULTS ==================== [[Statistics]] - n_function_calls = 65 - 
n_variables = 7 - n_data_points = 144 - n_independent = 17.1064802 - chi_square = 65.0941583 - reduced chi_square = 6.44083368 - r-factor = 0.01751179 - Akaike info crit = 36.8607146 - Bayesian info crit = 42.7369161 - -[[Dataset]] - dataset unique_id = 'dp3elp4f' - fit space = 'r' - r-range = 1.000, 3.200 - k-range = 2.000, 13.500 - k window, dk = 'kaiser', 3.000 - paths used in fit = ['feff_feo01.dat', 'feff_feo02.dat'] - k-weight = 3 - epsilon_k = Array(mean=7.7091e-4, std=0.0018654) - epsilon_r = 0.1661505 - n_independent = 17.106 + n_function_calls = 65 + n_variables = 7 + n_data_points = 144 + n_independent = 17.1064802 + chi_square = 65.0941583 + reduced chi_square = 6.44083368 + r-factor = 0.01751179 + Akaike info crit = 36.8607146 + Bayesian info crit = 42.7369161 [[Variables]] - de0 = -1.4741656 +/- 1.1341834 (init= 0.1000000) - delr_1 = -0.0293664 +/- 0.0107585 (init= 0.0000000) - delr_2 = 0.0478234 +/- 0.0083873 (init= 0.0000000) - n1 = 5.5325714 +/- 1.2512685 (init= 6.0000000) - n2 = 11.419642 +/- 1.5349979 (init= 12.000000) - sig2_1 = 0.0120204 +/- 0.0027069 (init= 0.0020000) - sig2_2 = 0.0125357 +/- 0.0011737 (init= 0.0020000) + de0 = -1.4741656 +/- 1.1341834 (init= 0.1000000) + delr_1 = -0.0293664 +/- 0.0107585 (init= 0.0000000) + delr_2 = 0.0478234 +/- 0.0083873 (init= 0.0000000) + n1 = 5.5325714 +/- 1.2512685 (init= 6.0000000) + n2 = 11.419642 +/- 1.5349979 (init= 12.000000) + sig2_1 = 0.0120204 +/- 0.0027069 (init= 0.0020000) + sig2_2 = 0.0125357 +/- 0.0011737 (init= 0.0020000) -[[Correlations]] (unreported correlations are < 0.500) - n2, sig2_2 = 0.939 - de0, delr_2 = 0.919 - n1, sig2_1 = 0.897 - de0, delr_1 = 0.582 - delr_1, delr_2 = 0.539 +[[Correlations]] (unreported correlations are < 0.500) + n2, sig2_2 = 0.939 + de0, delr_2 = 0.919 + n1, sig2_1 = 0.897 + de0, delr_1 = 0.582 + delr_1, delr_2 = 0.539 + +[[Dataset]] + unique_id = 'dp3elp4f' + fit space = 'r' + r-range = 1.000, 3.200 + k-range = 2.000, 13.500 + k window, dk = 'kaiser', 3.000 + 
paths used in fit = ['feff_feo01.dat', 'feff_feo02.dat'] + k-weight = 3 + epsilon_k = Array(mean=7.7091e-4, std=0.0018654) + epsilon_r = 0.1661505 + n_independent = 17.106 ======================================================= diff --git a/examples/feffit/doc_feffit5_w.out b/examples/feffit/doc_feffit5_w.out index f003fe64b..14e7c86fd 100644 --- a/examples/feffit/doc_feffit5_w.out +++ b/examples/feffit/doc_feffit5_w.out @@ -1,39 +1,39 @@ =================== FEFFIT RESULTS ==================== [[Statistics]] - n_function_calls = 57 - n_variables = 7 - n_data_points = 33120 - n_independent = 17.1064802 - chi_square = 821.972624 - reduced chi_square = 81.3312454 - r-factor = 0.00997260 - Akaike info crit = 80.2405636 - Bayesian info crit = 86.1167651 - -[[Dataset]] - dataset unique_id = 'dp3elp4f' - fit space = 'w' - r-range = 1.000, 3.200 - k-range = 2.000, 13.500 - k window, dk = 'kaiser', 3.000 - paths used in fit = ['feff_feo01.dat', 'feff_feo02.dat'] - k-weight = 2 - epsilon_k = Array(mean=8.0600e-4, std=0.0018556) - epsilon_r = 0.0152248 - n_independent = 17.106 + n_function_calls = 57 + n_variables = 7 + n_data_points = 33120 + n_independent = 17.1064802 + chi_square = 821.972624 + reduced chi_square = 81.3312454 + r-factor = 0.00997260 + Akaike info crit = 80.2405636 + Bayesian info crit = 86.1167651 [[Variables]] - de0 = -1.9317738 +/- 0.8311298 (init= 0.1000000) - delr_1 = -0.0304701 +/- 0.0088744 (init= 0.0000000) - delr_2 = 0.0444336 +/- 0.0069506 (init= 0.0000000) - n1 = 6.0577126 +/- 1.2924562 (init= 6.0000000) - n2 = 12.185287 +/- 1.2509910 (init= 12.000000) - sig2_1 = 0.0131212 +/- 0.0029094 (init= 0.0020000) - sig2_2 = 0.0131448 +/- 0.0010218 (init= 0.0020000) + de0 = -1.9317738 +/- 0.8311298 (init= 0.1000000) + delr_1 = -0.0304701 +/- 0.0088744 (init= 0.0000000) + delr_2 = 0.0444336 +/- 0.0069506 (init= 0.0000000) + n1 = 6.0577126 +/- 1.2924562 (init= 6.0000000) + n2 = 12.185287 +/- 1.2509910 (init= 12.000000) + sig2_1 = 0.0131212 +/- 0.0029094 
(init= 0.0020000) + sig2_2 = 0.0131448 +/- 0.0010218 (init= 0.0020000) -[[Correlations]] (unreported correlations are < 0.500) - n2, sig2_2 = 0.944 - de0, delr_2 = 0.928 - n1, sig2_1 = 0.917 - de0, delr_1 = 0.519 +[[Correlations]] (unreported correlations are < 0.500) + n2, sig2_2 = 0.944 + de0, delr_2 = 0.928 + n1, sig2_1 = 0.917 + de0, delr_1 = 0.519 + +[[Dataset]] + unique_id = 'dp3elp4f' + fit space = 'w' + r-range = 1.000, 3.200 + k-range = 2.000, 13.500 + k window, dk = 'kaiser', 3.000 + paths used in fit = ['feff_feo01.dat', 'feff_feo02.dat'] + k-weight = 2 + epsilon_k = Array(mean=8.0600e-4, std=0.0018556) + epsilon_r = 0.0152248 + n_independent = 17.106 ======================================================= diff --git a/history/README b/history/README new file mode 100644 index 000000000..ccff8b721 --- /dev/null +++ b/history/README @@ -0,0 +1 @@ +this folder contains code that may be of historical interest, but is not currently used diff --git a/modules/__init__.py b/history/modules/__init__.py similarity index 100% rename from modules/__init__.py rename to history/modules/__init__.py diff --git a/modules/gsemap.lar b/history/modules/gsemap.lar similarity index 100% rename from modules/gsemap.lar rename to history/modules/gsemap.lar diff --git a/modules/xafs_plots.lar b/history/modules/xafs_plots.lar similarity index 100% rename from modules/xafs_plots.lar rename to history/modules/xafs_plots.lar diff --git a/plugins/README.TXT b/history/plugins/README.TXT similarity index 100% rename from plugins/README.TXT rename to history/plugins/README.TXT diff --git a/plugins/__init__.py b/history/plugins/__init__.py similarity index 100% rename from plugins/__init__.py rename to history/plugins/__init__.py diff --git a/larch/wxxrd/XRD1Dviewer.py b/history/xrd/XRD1Dviewer.py similarity index 100% rename from larch/wxxrd/XRD1Dviewer.py rename to history/xrd/XRD1Dviewer.py diff --git a/larch/xsw/YongsCode/SimpleParratt.py b/history/xsw/YongsCode/SimpleParratt.py 
similarity index 100% rename from larch/xsw/YongsCode/SimpleParratt.py rename to history/xsw/YongsCode/SimpleParratt.py diff --git a/larch/xsw/YongsCode/SimpleParratt_Panel.py b/history/xsw/YongsCode/SimpleParratt_Panel.py similarity index 100% rename from larch/xsw/YongsCode/SimpleParratt_Panel.py rename to history/xsw/YongsCode/SimpleParratt_Panel.py diff --git a/larch/xsw/YongsCode/fluo_det.py b/history/xsw/YongsCode/fluo_det.py similarity index 100% rename from larch/xsw/YongsCode/fluo_det.py rename to history/xsw/YongsCode/fluo_det.py diff --git a/larch/xsw/YongsCode/fluo_panel.py b/history/xsw/YongsCode/fluo_panel.py similarity index 100% rename from larch/xsw/YongsCode/fluo_panel.py rename to history/xsw/YongsCode/fluo_panel.py diff --git a/larch/xsw/YongsCode/setup.py b/history/xsw/YongsCode/setup.py similarity index 100% rename from larch/xsw/YongsCode/setup.py rename to history/xsw/YongsCode/setup.py diff --git a/installers/GetLarch.sh b/installers/GetLarch.sh index 4128d7d20..471964cee 100644 --- a/installers/GetLarch.sh +++ b/installers/GetLarch.sh @@ -5,6 +5,7 @@ ## all required packages with mamba or pip prefix=$HOME/xraylarch +larchurl='xraylarch[larix]' uname=`uname` if [ $uname == Darwin ]; then @@ -59,7 +60,6 @@ if [ -d $prefix ] ; then exit 0 fi -larchurl='xraylarch[larix]' echo "############## " | tee $logfile echo "## This script will install Larch for $uname to $prefix" | tee -a $logfile @@ -90,9 +90,9 @@ echo "#> $prefix/bin/mamba install -yc conda-forge $cforge_pkgs " | tee -a $logf $prefix/bin/mamba install -y -c conda-forge $cforge_pkgs $prefix/bin/mamba list -echo "##Installing xraylarch as 'pip install $larchurl'" | tee -a $logfile -echo "#> $prefix/bin/pip install $larchurl"| tee -a $logfile -$prefix/bin/pip install $larchurl | tee -a $logfile +echo "##Installing xraylarch as 'pip install \"$larchurl\"'" | tee -a $logfile +echo "#> $prefix/bin/pip install \"$larchurl\""| tee -a $logfile +$prefix/bin/pip install "$larchurl" | tee -a 
$logfile ## create desktop shortcuts echo "## Creating desktop shortcuts" diff --git a/larch/apps.py b/larch/apps.py index ee4b860bb..40e0dfc1f 100644 --- a/larch/apps.py +++ b/larch/apps.py @@ -4,7 +4,7 @@ import os import sys import locale - +import inspect import shutil from argparse import ArgumentParser @@ -32,109 +32,137 @@ def use_mpl_wxagg(): if HAS_WXPYTHON: try: matplotlib.use('WXAgg', force=True) - return True except ImportError: pass - return False def set_locale(): - """set locale to 'C' for these applications, - may need some improvement!!""" + """set locale to 'C' for these applications""" locale.setlocale(locale.LC_ALL, 'C') -# App Name, icon, terminal, Script / pyshortcuts command -MainApps = (('Larch CLI', 'larch', True, 'larch'), - ('Larch Updater', 'larch', True, '_ -m pip install --upgrade xraylarch'), - ('Larch GUI', 'larch', False, 'larch --wxgui'), - ('XAS Viewer', 'onecone', False, 'xas_viewer'), - ('Larix', 'onecone', False, 'larix'), - ('GSE MapViewer', 'gse_xrfmap', False, 'gse_mapviewer'), - ('XRF Viewer', 'ptable', False, 'larch_xrf'), - ('XRD1D Viewer', 'larch', False, 'larch_xrd1d') ) - -def make_desktop_shortcuts(): - """make (or remake) desktop shortcuts for Larch apps""" - larchdir = os.path.join(get_desktop(), 'Larch') - if os.path.exists(larchdir): - shutil.rmtree(larchdir) - - bindir = 'Scripts' if uname == 'win' else 'bin' - bindir = os.path.join(sys.prefix, bindir) - for appname, icon, term, script in MainApps: - kwargs = {'folder': 'Larch', 'terminal': term, 'name': appname} - if not script.startswith('_'): - script = os.path.normpath(os.path.join(bindir, script)) - icon = os.path.join(icondir, icon) +class LarchApp(object): + """wrapper for Larh application""" + def __init__(self, name, script, icon=None, description=None, + is_wxapp=True, filetype=None): + self.name = name + self.script = script + self.is_wxapp = is_wxapp + self.description = description or name + self.icon = icon or 'larch' + self.filetype = filetype or 
'data file' + + def make_desktop_shortcut(self, folder='Larch'): + """make (or remake) desktop shortcuts for Larch apps""" + bindir = 'Scripts' if uname == 'win' else 'bin' + bindir = os.path.join(sys.prefix, bindir) + print("BINDIR ") + script = self.script + if not self.script.startswith('_'): + script = os.path.normpath(os.path.join(bindir, self.script)) + + + icon = os.path.join(icondir, self.icon) if isinstance(ico_ext, (list, tuple)): for ext in ico_ext: - ticon = f"{icon:s}.{ext:s}" + ticon = f"{self.icon:s}.{ext:s}" if os.path.exists(ticon): icon = ticon - make_shortcut(script, icon=icon, **kwargs) -def make_cli(description='run larch program', filedesc='data file'): - "make commandline apps" - parser = ArgumentParser(description=description) - parser.add_argument('filename', nargs='?', help=filedesc) - args = parser.parse_args() - filename = None - if 'filename' in args and args.filename is not None: - filename = os.path.abspath(args.filename) - return {'filename': filename} + print("MAKE D short ", self.name, script) + make_shortcut(script, name=self.name, folder=folder, icon=icon, + description=self.description, + terminal=(not self.is_wxapp)) + + + def prep_cli(self): + parser = ArgumentParser(description=self.description) + parser.add_argument('filename', nargs='?', help=self.filetype) + args = parser.parse_args() + self.filename = None + if 'filename' in args and args.filename is not None: + self.filename = os.path.abspath(args.filename) + + if self.is_wxapp: + set_locale() + use_mpl_wxagg() + + +# # App Name, icon, terminal, Script / pyshortcuts command, Description +# MainApps = (('Larch CLI', 'larch', True, 'larch', 'Basic Command-line interface for Larch'), +# ('Larch Updater', 'larch', True, '_ -m pip install --upgrade xraylarch', 'Larch Updatar'), +# ('Larch GUI', 'larch', False, 'larch --wxgui', 'Enhanced Command-line interface for Larch'), +# ('XAS Viewer', 'onecone', False, 'larix', 'XANES and EXAFS Analysis GUI for Larch'), +# ('Larix', 
'onecone', False, 'larix', 'XANES and EXAFS Analysis GUI for Larch'), +# ('GSE MapViewer', 'gse_xrfmap', False, 'gse_mapviewer', 'XRF Map Viewing and Analysis'), +# ('XRF Viewer', 'ptable', False, 'larch_xrf', 'X-ray FluorescenceData Viewing and Analysis'), +# ('XRD1D Viewer', 'larch', False, 'larch_xrd1d', 'X-ray Diffraction Data Viewing'), +# ) +# + + +LarchApps = { + 'larch': LarchApp(name='Larch CLI', script='larch', icon='larch', + description='Basic Command-line interface for Larch'), + 'Larch GUI': LarchApp(name='Larch GUI', script='larch --wxgui', icon='larch', + description='Enhanced Command-line interface for Larch'), + 'Larch Updater': LarchApp(name='Update Larch', + script='_ -m pip install --upgrade xraylarch', + icon='larch', + description='Larch Updater', is_wxapp=False), + + 'Larix': LarchApp(name='Larix', script='larix', icon='onecone', + description='XANES and EXAFS Analysis GUI for Larch'), + 'XAS Viewer': LarchApp(name='XAS Viewer', script='larix', icon='onecone', + description='XANES and EXAFS Analysis GUI for Larch'), + 'XRFMap Viewer': LarchApp(name='XRFMap Viewer', script='gse_mapviewer', + icon='gse_xrfmap', filetype='XRM Map File (.h5)', + description='XRFMap Viewing and Analysis'), + 'XRF Viewer': LarchApp(name='XRF Viewer', script='larch_xrf', icon='ptable', + description='X-ray FluorescenceData Viewing and Analysis'), + 'XRD1D Viewer': LarchApp(name='XRD1D Viewer', script='larch_xrd1d', icon='larch', + description='X-ray Diffraction Data Viewing'), + } + # entry points: def run_gse_mapviewer(): - """Mapviewer""" - set_locale() - use_mpl_wxagg() - kwargs = make_cli(description="Larch's XRM Map Viewer and Analysis Program", - filedesc='XRM Map File (.h5)') + "XRFMap Viewer" + app = LarchApps['XRFMap Viewer'] + app.prep_cli() from .wxmap import MapViewer - MapViewer(check_version=True, **kwargs).MainLoop() + MapViewer(check_version=True, title=app.description, + filename=app.filename).MainLoop() -def run_gse_dtcorrect(): - """GSE DT 
Correct """ - set_locale() - use_mpl_wxagg() - from .wxmap import DTViewer - DTViewer().MainLoop() def run_larix(): - """Larix (was XAS Viewer)""" - set_locale() - use_mpl_wxagg() - from .wxxas import XASViewer, LARIX_TITLE - kwargs = make_cli(description=LARIX_TITLE) - XASViewer(check_version=True, **kwargs).MainLoop() + """XANES and EXAFS Analysis GUI for Larch""" + app = LarchApps['Larix'] + app.prep_cli() + from .wxxas import XASViewer + XASViewer(check_version=True, filename=app.filename).MainLoop() run_xas_viewer = run_larix def run_larch_xrf(): - """ XRF Display""" - set_locale() - use_mpl_wxagg() - kwargs = make_cli(description="Larch's XRF Viewer and Analysis Program", - filedesc='MCA File (.mca)') + """X-ray FluorescenceData Viewing and Analysis""" + app = LarchApps['XRF Viewer'] + app.prep_cli() from .wxlib.xrfdisplay import XRFApp - XRFApp(**kwargs).MainLoop() + XRFApp(filename=app.filename).MainLoop() def run_epics_xrf(): - """XRF Display for Epics Detectors""" - set_locale() - use_mpl_wxagg() - IMPORT_OK = False + """XRF Viewing and Control for Epics XRF Detectors""" + app = LarchApps['XRF Viewer'] + app.prep_cli() try: from .epics import EpicsXRFApp - IMPORT_OK = True - except ImportError: - print("cannot import EpicsXRFApp: try `pip install xraylarch[epics]`") - if IMPORT_OK: EpicsXRFApp().MainLoop() + except ImportError: + print('cannot import EpicsXRFApp: try `pip install "xraylarch[epics]"`') def run_larch_xrd1d(): - """XRD Display for 1D patternss""" - set_locale() - use_mpl_wxagg() + """X-ray Diffraction Data Display""" + app = LarchApps['XRD1D Viewer'] + app.prep_cli() from .wxxrd import XRD1DApp XRD1DApp().MainLoop() @@ -145,6 +173,13 @@ def run_xrd2d_viewer(): from .wxxrd import XRD2DViewer XRD2DViewer().MainLoop() +def run_gse_dtcorrect(): + """GSE DT Correct """ + set_locale() + use_mpl_wxagg() + from .wxmap import DTViewer + DTViewer().MainLoop() + def run_feff6l(): "run feff6l" @@ -169,33 +204,19 @@ def run_larch(): commandline repl 
program or wxgui """ parser = ArgumentParser(description='run main larch program') - - parser.add_argument('-v', '--version', dest='version', action='store_true', - default=False, help='show version') - - parser.add_argument("-e", "--exec", dest="noshell", action="store_true", - default=False, help="execute script only, default = False") - - parser.add_argument("-q", "--quiet", dest="quiet", action="store_true", - default=False, help="set quiet mode, default = False") - - parser.add_argument("-x", "--nowx", dest="nowx", action="store_true", - default=False, help="set no wx graphics mode, default = False") - - parser.add_argument("-w", "--wxgui", dest="wxgui", default=False, - action='store_true', help="run Larch GUI") - - parser.add_argument("-m", "--makeicons", dest="makeicons", action="store_true", - default=False, help="create desktop icons") - - parser.add_argument('-u', '--update', dest='update', action='store_true', - default=False, help='update larch to the latest version') - - parser.add_argument("-r", "--remote", dest="server_mode", action="store_true", - default=False, help="run in remote server mode") - - parser.add_argument("-p", "--port", dest="port", default='4966', - help="port number for remote server") + sargs = (("-v", "--version", "version", False, "show version"), + ("-e", "--exec", "noshell", False, "execute script only"), + ("-q", "--quiet", "quiet", False, "set quiet mode"), + ("-x", "--nowx", "nowx", False, "set no wx graphics mode"), + ("-w", "--wxgui", "wxgui", False, "run Larch GUI"), + ("-m", "--makeicons", "makeicons", False, "create desktop icons"), + ("-u", "--update", "update", False, "update larch to the latest version"), + ("-r", "--remote", "server_mode", False, "run in remote server mode"), + ("-p", "--port", "port", "4966", "port number for remote server")) + + for opt, longopt, dest, default, help in sargs: + parser.add_argument(opt, longopt, dest=dest, action='store_true', + default=default, help=help) 
parser.add_argument('scripts', nargs='*', help='larch or python scripts to run on startup') @@ -212,7 +233,12 @@ def run_larch(): # create desktop icons if args.makeicons: - make_desktop_shortcuts() + larchdir = os.path.join(get_desktop(), 'Larch') + if os.path.exists(larchdir): + shutil.rmtree(larchdir) + + for n, app in LarchApps.items(): + app.make_desktop_shortcut() return # run updates diff --git a/larch/fitting/__init__.py b/larch/fitting/__init__.py index 4355bae75..de4264544 100644 --- a/larch/fitting/__init__.py +++ b/larch/fitting/__init__.py @@ -14,6 +14,8 @@ from lmfit.model import (ModelResult, save_model, load_model, save_modelresult, load_modelresult) from lmfit.confidence import f_compare + +from lmfit.printfuncs import gformat, getfloat_attr from uncertainties import ufloat, correlated_values from uncertainties import wrap as un_wrap @@ -30,6 +32,113 @@ def param_value(val): val = val.value return val +def format_param(par, length=10, with_initial=True): + value = repr(par) + if isParameter(par): + value = gformat(par.value, length=length) + if not par.vary and par.expr is None: + value = f"{value} (fixed)" + else: + stderr = 'unknown' + if par.stderr is not None: + stderr = gformat(par.stderr, length=length) + value = f"{value} +/-{stderr}" + if par.vary and par.expr is None and with_initial: + value = f"{value} (init={gformat(par.init_value, length=length)})" + if par.expr is not None: + value = f"{value} = '{par.expr}'" + return value + + +def stats_table(results, labels=None, csv_output=False, csv_delim=','): + """ + create a table comparing fit statistics for multiple fit results + """ + stats = {'number of variables': 'nvarys', + 'chi-square': 'chi_square', + 'reduced chi-square': 'chi2_reduced', + 'r-factor': 'rfactor', + 'Akaike Info Crit': 'aic', + 'Bayesian Info Crit': 'bic'} + + nfits = len(results) + if labels is not None: + if len(labels) != len(results): + raise ValueError('labels must be a list that is the same length as results') + 
+ columns = [['Statistics']] + if labels is None: + labels = [f" Fit {i+1}" for i in range(nfits)] + for lab in labels: + columns.append([lab]) + + for sname, attr in stats.items(): + columns[0].append(sname) + for i, result in enumerate(results): + columns[i+1].append(getfloat_attr(result, attr)) + + return format_table_columns(columns, csv_output=csv_output, csv_delim=csv_delim) + +def paramgroups_table(pgroups, labels=None, csv_output=False, csv_delim=','): + """ + create a table comparing parameters from a Feffit Parameter Grooup for multiple fit results + """ + nfits = len(pgroups) + if labels is not None: + if len(labels) != len(pgroups): + raise ValueError('labels must be a list that is the same length as Parameter Groups') + + columns = [['Parameter']] + if labels is None: + labels = [f" Fit {i+1}" for i in range(nfits)] + for lab in labels: + columns.append([lab]) + + parnames = [] + for pgroup in pgroups: + for pname in dir(pgroup): + if pname not in parnames: + parnames.append(pname) + + for pname in parnames: + columns[0].append(pname) + for i, pgroup in enumerate(pgroups): + value = 'N/A' + par = getattr(pgroup, pname, None) + if par is not None: + value = format_param(par, length=10, with_initial=False) + columns[i+1].append(value) + + return format_table_columns(columns, csv_output=csv_output, csv_delim=csv_delim) + + +def format_table_columns(columns, csv_output=False, csv_delim=','): + hjoin, rjoin, edge = '+', '|', '|' + if csv_output: + hjoin, rjoin, edge = csv_delim, csv_delim, '' + + ncols = len(columns) + nrows = len(columns[0]) + slen = [2]*ncols + for i, col in enumerate(columns): + slen[i] = max(5, max([len(row) for row in col])) + + buff = [] + if not csv_output: + header = edge + hjoin.join(['-'*(lx+2) for lx in slen]) + edge + buff = [header] + + while len(columns[0]) > 0: + values = [c.pop(0) for c in columns] + row = rjoin.join([f" {l:{slen[i]}.{slen[i]}s} " for i, l in enumerate(values)]) + buff.append(edge + row + edge) + if not 
csv_output and len(buff) == 2: + buff.append(header) + if not csv_output: + buff.append(header) + return '\n'.join(buff) + + def f_test(ndata, nvars, chisquare, chisquare0, nfix=1): """return the F-test value for the following input values: f = f_test(ndata, nparams, chisquare, chisquare0, nfix=1) diff --git a/larch/io/athena_project.py b/larch/io/athena_project.py index 10c583a5e..f9a56b14e 100644 --- a/larch/io/athena_project.py +++ b/larch/io/athena_project.py @@ -553,7 +553,7 @@ def parse_jsonathena(text, filename): class AthenaGroup(Group): """A special Group for handling datasets loaded from Athena project files""" - def __init__(self, show_sel=False): + def __init__(self, show_sel=False, **kws): """Constructor Parameters @@ -562,7 +562,7 @@ def __init__(self, show_sel=False): show_sel : boolean, False if True, it shows the selection flag in HTML representation """ - super().__init__() + super().__init__(**kws) self.show_sel = show_sel def _repr_html_(self): @@ -600,14 +600,12 @@ def groups(self, groups): self._athena_groups = groups def __getitem__(self, key): - if isinstance(key, int): raise IndexError("AthenaGroup does not support integer indexing") return getattr(self, key) def __setitem__(self, key, value): - if isinstance(key, int): raise IndexError("AthenaGroup does not support integer indexing") diff --git a/larch/io/fileutils.py b/larch/io/fileutils.py index 746aa93cb..89d20d9fc 100644 --- a/larch/io/fileutils.py +++ b/larch/io/fileutils.py @@ -6,10 +6,12 @@ import os import sys -from random import seed, randrange +from random import Random from string import printable from ..utils.strutils import fix_filename, fix_varname, strip_quotes +rng = Random() + def asciikeys(adict): """ensure a dictionary has ASCII keys (and so can be an **kwargs)""" return dict((k.encode('ascii'), v) for k, v in adict.items()) @@ -28,14 +30,15 @@ def get_timestamp(with_t=False): time.strftime('%Y-%m-%dT%H:%M:%S') return time.strftime('%Y-%m-%d %H:%M:%S') -def 
random_string(n): +def random_string(n, rng_seed=None): """ random_string(n) generates a random string of length n, that will match: [a-z][a-z0-9](n-1) """ - seed(time.time()) - s = [printable[randrange(0,36)] for i in range(n-1)] - s.insert(0, printable[randrange(10,36)]) + if rng_seed is not None: + rng.seed(rng_seed) + s = [printable[rng.randrange(0, 36)] for i in range(n-1)] + s.insert(0, printable[rng.randrange(10, 36)]) return ''.join(s) def pathOf(dir, base, ext, delim='.'): diff --git a/larch/io/save_restore.py b/larch/io/save_restore.py index 81e41ea29..a589965b3 100644 --- a/larch/io/save_restore.py +++ b/larch/io/save_restore.py @@ -192,7 +192,6 @@ def read_session(fname): cmd_history = [] nsyms = nsym_expected = 0 section = symname = '_unknown_' - for line in lines: if line.startswith("##<"): section = line.replace('##<','').replace('>', '').strip().lower() @@ -210,7 +209,7 @@ def read_session(fname): try: symbols[symname] = decode4js(json.loads(line)) except: - print("decode failed:: ", symname, line[:150]) + print("decode failed:: ", symname, repr(line)[:50]) else: if line.startswith('##') and ':' in line: @@ -222,7 +221,7 @@ def read_session(fname): try: val = decode4js(json.loads(val)) except: - print("decode failed @## ", val[:150]) + print("decode failed @## ", repr(val)[:50]) config[key] = val return SessionStore(config, cmd_history, symbols) diff --git a/larch/math/learn_regress.py b/larch/math/learn_regress.py index c3dd715af..ec1d1ff84 100644 --- a/larch/math/learn_regress.py +++ b/larch/math/learn_regress.py @@ -10,6 +10,7 @@ try: from sklearn.cross_decomposition import PLSRegression from sklearn.model_selection import RepeatedKFold + from sklearn.preprocessing import StandardScaler from sklearn.linear_model import LassoLarsCV, LassoLars, Lasso HAS_SKLEARN = True except ImportError: @@ -88,7 +89,10 @@ def pls_train(groups, varname='valence', arrayname='norm', scale=True, cv = RepeatedKFold(n_splits=cv_folds, n_repeats=cv_repeats) for ctrain, 
ctest in cv.split(range(nvals)): model.fit(spectra[ctrain, :], ydat[ctrain]) - ypred = model.predict(spectra[ctest, :])[:, 0] + ypred = model.predict(spectra[ctest, :]) + if len(ypred.shape) == 2: + ypred = ypred[:, 0] + resid.extend((ypred - ydat[ctest]).tolist()) resid = np.array(resid) rmse_cv = np.sqrt( (resid**2).mean() ) @@ -97,7 +101,9 @@ def pls_train(groups, varname='valence', arrayname='norm', scale=True, model = PLSRegression(**kws) out = model.fit(spectra, ydat) - ypred = model.predict(spectra)[:, 0] + ypred = model.predict(spectra) + if len(ypred.shape) == 2: + ypred = ypred[:, 0] rmse = np.sqrt(((ydat - ypred)**2).mean()) @@ -160,11 +166,17 @@ def lasso_train(groups, varname='valence', arrayname='norm', alpha=None, ydat = np.array(ydat) nvals = len(groups) + ymean = norms = nonzero = None + spectra_scaled = spectra[:] + if fit_intercept and normalize: + s_scalar = StandardScaler().fit(spectra) + spectra_scaled = s_scalar.transform(spectra) - kws.update(dict(fit_intercept=fit_intercept, normalize=normalize)) + kws.update(dict(fit_intercept=fit_intercept)) creator = LassoLars if use_lars else Lasso model = None + npts = xdat.shape[0] rmse_cv = None if not skip_cv: if cv_folds is None: @@ -176,34 +188,37 @@ def lasso_train(groups, varname='valence', arrayname='norm', alpha=None, if alpha is None: lcvmod = LassoLarsCV(cv=cv, max_n_alphas=1e7, max_iter=1e7, eps=1.e-12, **kws) - lcvmod.fit(spectra, ydat) + lcvmod.fit(spectra_scaled, ydat) alpha = lcvmod.alpha_ - + # if normalize: + # alpha = alpha / np.sqrt(npts) model = creator(alpha=alpha, **kws) resid = [] for ctrain, ctest in cv.split(range(nvals)): - model.fit(spectra[ctrain, :], ydat[ctrain]) - ypred = model.predict(spectra[ctest, :]) + model.fit(spectra_scaled[ctrain, :], ydat[ctrain]) + ypred = model.predict(spectra_scaled[ctest, :]) resid.extend((ypred - ydat[ctest]).tolist()) resid = np.array(resid) rmse_cv = np.sqrt( (resid**2).mean() ) if alpha is None: cvmod = creator(**kws) - 
cvmod.fit(spectra, ydat) + cvmod.fit(spectra_scaled, ydat) alpha = cvmod.alpha_ + # alpha = alpha / np.sqrt(npts) if model is None: model = creator(alpha=alpha, **kws) # final fit without cross-validation - out = model.fit(spectra, ydat) - - ypred = model.predict(spectra) + out = model.fit(spectra_scaled, ydat) + # print("model fit out: ", out) + ypred = model.predict(spectra_scaled) rmse = np.sqrt(((ydat - ypred)**2).mean()) - return Group(x=xdat, spectra=spectra, ydat=ydat, ypred=ypred, + return Group(x=xdat, spectra=spectra, spectra_scaled=spectra_scaled, + ydat=ydat, ypred=ypred, alpha=alpha, active=model.active_, coefs=model.coef_, cv_folds=cv_folds, cv_repeats=cv_repeats, rmse_cv=rmse_cv, rmse=rmse, model=model, varname=varname, diff --git a/larch/math/transformations.py b/larch/math/transformations.py index a5e03aa57..aa67ce13d 100644 --- a/larch/math/transformations.py +++ b/larch/math/transformations.py @@ -64,8 +64,8 @@ Matrices (M) can be inverted using numpy.linalg.inv(M), be concatenated using numpy.dot(M0, M1), or transform homogeneous coordinate arrays (v) using -numpy.dot(M, v) for shape (4, \*) column vectors, respectively -numpy.dot(v, M.T) for shape (\*, 4) row vectors ("array of points"). +numpy.dot(M, v) for shape (4, *) column vectors, respectively +numpy.dot(v, M.T) for shape (*, 4) row vectors ("array of points"). This module follows the "column vectors on the right" and "row major storage" (C contiguous) conventions. The translation components are in the right column @@ -889,7 +889,7 @@ def orthogonalization_matrix(lengths, angles): def affine_matrix_from_points(v0, v1, shear=True, scale=True, usesvd=True): """Return affine transform matrix to register two point sets. - v0 and v1 are shape (ndims, \*) arrays of at least ndims non-homogeneous + v0 and v1 are shape (ndims, *) arrays of at least ndims non-homogeneous coordinates, where ndims is the dimensionality of the coordinate space. 
If shear is False, a similarity transformation matrix is returned. @@ -998,7 +998,7 @@ def affine_matrix_from_points(v0, v1, shear=True, scale=True, usesvd=True): def superimposition_matrix(v0, v1, scale=False, usesvd=True): """Return matrix to transform given 3D point set into second point set. - v0 and v1 are shape (3, \*) or (4, \*) arrays of at least 3 points. + v0 and v1 are shape (3, *) or (4, *) arrays of at least 3 points. The parameters scale and usesvd are explained in the more general affine_matrix_from_points function. diff --git a/larch/math/utils.py b/larch/math/utils.py index 640e68947..355e367a0 100644 --- a/larch/math/utils.py +++ b/larch/math/utils.py @@ -6,6 +6,7 @@ from scipy.stats import linregress from scipy.interpolate import UnivariateSpline +from scipy.interpolate import InterpolatedUnivariateSpline as IUSpline from scipy.interpolate import interp1d as scipy_interp1d from .lineshapes import gaussian, lorentzian, voigt @@ -151,7 +152,7 @@ def interp(x, y, xnew, kind='linear', fill_value=np.nan, **kws): coefs = polyfit(x[sel], y[sel], 2) out[span] = coefs[0] + xnew[span]*(coefs[1] + coefs[2]*xnew[span]) elif kind.startswith('cubic'): - out[span] = UnivariateSpline(x[sel], y[sel], s=0)(xnew[span]) + out[span] = IUSpline(x[sel], y[sel])(xnew[span]) return out diff --git a/larch/plot/plotly_xafsplots.py b/larch/plot/plotly_xafsplots.py index bdd47dc19..077a8416d 100644 --- a/larch/plot/plotly_xafsplots.py +++ b/larch/plot/plotly_xafsplots.py @@ -706,7 +706,7 @@ def plot_chir(dgroup, show_mag=True, show_real=False, show_imag=False, #enddef -def plot_chiq(dgroup, kweight=None, kmax=None, show_chik=False, label=None, +def plot_chiq(dgroup, kweight=None, kmin=0, kmax=None, show_chik=False, label=None, title=None, new=True, delay_draw=False, offset=0, win=1, show_window=False, scale_window=True, _larch=None): """ @@ -747,9 +747,9 @@ def plot_chiq(dgroup, kweight=None, kmax=None, show_chik=False, label=None, fig = PlotlyFigure(two_yaxis=False) 
fig.add_plot(dgroup.k, chiq+offset, label=label) - - if kmin is not None or kmax is not None: - fig.set_xrange(kmin, kmax) + if kmax is None: + kmax = max(dgroup.k) + fig.set_xrange(kmin, kmax) ylabel = set_label_weight(plotlabels.chikw, kweight) fig.set_style(title=title, xaxis_title=plotlabels.k, diff --git a/larch/utils/__init__.py b/larch/utils/__init__.py index 23f6b2f22..e2c445229 100644 --- a/larch/utils/__init__.py +++ b/larch/utils/__init__.py @@ -150,7 +150,7 @@ def copy_xafs_group(group, _larch=None): for attr in dir(group): do_copy = True - if attr in ('xdat', 'ydat', 'i0', 'data' 'yerr', + if attr in ('xdat', 'ydat', 'i0', 'data', 'yerr', 'energy', 'mu'): val = getattr(group, attr)*1.0 elif attr in ('norm', 'flat', 'deriv', 'deconv', diff --git a/larch/utils/debugtime.py b/larch/utils/debugtime.py index c037e5b32..3452615da 100644 --- a/larch/utils/debugtime.py +++ b/larch/utils/debugtime.py @@ -6,6 +6,7 @@ class debugtime(object): def __init__(self, verbose=False, _larch=None, precision=3): self._larch = _larch + self.precision = precision self.clear() self.verbose = verbose self.add('init') diff --git a/larch/utils/jsonutils.py b/larch/utils/jsonutils.py index 7975d2e30..2c652593a 100644 --- a/larch/utils/jsonutils.py +++ b/larch/utils/jsonutils.py @@ -8,45 +8,64 @@ import h5py from datetime import datetime from collections import namedtuple +from types import ModuleType +import importlib import logging -HAS_STATE = {} -try: - from sklearn.cross_decomposition import PLSRegression - from sklearn.linear_model import LassoLarsCV, LassoLars, Lasso - HAS_STATE.update({'PLSRegression': PLSRegression, - 'LassoLarsCV':LassoLarsCV, - 'LassoLars': LassoLars, 'Lasso': Lasso}) - -except ImportError: - pass from lmfit import Parameter, Parameters from lmfit.model import Model, ModelResult from lmfit.minimizer import Minimizer, MinimizerResult from lmfit.parameter import SCIPY_FUNCTIONS -from larch import Group, isgroup, Journal, ParameterGroup - -from larch.xafs 
import FeffitDataSet, FeffDatFile, FeffPathGroup, TransformGroup -from larch.xafs.feffutils import FeffCalcResults -from larch.utils.strutils import bytes2str, str2bytes, fix_varname +from larch import Group, isgroup from larch.utils.logging import getLogger from larch.utils.logging import _levels as LoggingLevels -HAS_STATE['FeffCalcResults'] = FeffCalcResults -HAS_STATE['FeffDatFile'] = FeffDatFile -HAS_STATE['FeffPathGroup'] = FeffPathGroup -HAS_STATE['Journal'] = Journal +HAS_STATE = {} +LarchGroupTypes = {} + +def setup_larchtypes(): + global HAS_STATE, LarchGroupTypes + if len(HAS_STATE) == 0 or len(LarchGroupTypes)==0: + try: + from sklearn.cross_decomposition import PLSRegression + from sklearn.linear_model import LassoLarsCV, LassoLars, Lasso + HAS_STATE.update({'PLSRegression': PLSRegression, + 'LassoLarsCV':LassoLarsCV, + 'LassoLars': LassoLars, 'Lasso': Lasso}) + + except ImportError: + pass + + from larch import Journal, Group + + HAS_STATE['Journal'] = Journal + + from larch.xafs.feffutils import FeffCalcResults + HAS_STATE['FeffCalcResults'] = FeffCalcResults + + from larch.xafs import FeffDatFile, FeffPathGroup + HAS_STATE['FeffDatFile'] = FeffDatFile + HAS_STATE['FeffPathGroup'] = FeffPathGroup + + from larch import ParameterGroup + from larch.io.athena_project import AthenaGroup + from larch.xafs import FeffitDataSet, TransformGroup + + LarchGroupTypes = {'Group': Group, + 'AthenaGroup': AthenaGroup, + 'ParameterGroup': ParameterGroup, + 'FeffitDataSet': FeffitDataSet, + 'TransformGroup': TransformGroup, + 'MinimizerResult': MinimizerResult, + 'Minimizer': Minimizer, + 'FeffDatFile': FeffDatFile, + 'FeffPathGroup': FeffPathGroup, + } + + -LarchGroupTypes = {'Group': Group, - 'ParameterGroup': ParameterGroup, - 'FeffitDataSet': FeffitDataSet, - 'TransformGroup': TransformGroup, - 'MinimizerResult': MinimizerResult, - 'FeffDatFile': FeffDatFile, - 'FeffPathGroup': FeffPathGroup, - } def encode4js(obj): """return an object ready for json encoding. 
@@ -56,6 +75,7 @@ def encode4js(obj): Larch Groups Larch Parameters """ + setup_larchtypes() if obj is None: return None if isinstance(obj, np.ndarray): @@ -90,7 +110,6 @@ def encode4js(obj): out['writable'] = obj.writable() except ValueError: out['writable'] = False - return out elif isinstance(obj, h5py.File): return {'__class__': 'HDF5File', 'value': (obj.name, obj.filename, obj.mode, obj.libver), @@ -117,12 +136,20 @@ def encode4js(obj): out[encode4js(key)] = encode4js(val) return out elif isinstance(obj, logging.Logger): - level = 'DEBUG' for key, val in LoggingLevels.items(): if obj.level == val: level = key return {'__class__': 'Logger', 'name': obj.name, 'level': level} + elif isinstance(obj, Minimizer): + out = {'__class__': 'Minimizer'} + + for attr in ('userfcn', 'params', 'kw', 'scale_covar', 'max_nfev', + 'nan_policy', 'success', 'nfev', 'nfree', 'ndata', 'ier', + 'errorbars', 'message', 'lmdif_message', 'chisqr', + 'redchi', 'covar', 'userkws', 'userargs', 'result'): + out[attr] = encode4js(getattr(obj, attr, None)) + return out elif isinstance(obj, MinimizerResult): out = {'__class__': 'MinimizerResult'} for attr in ('aborted', 'aic', 'bic', 'call_kws', 'chisqr', @@ -162,6 +189,8 @@ def encode4js(obj): for item in dir(obj): out[item] = encode4js(getattr(obj, item)) return out + elif isinstance(obj, ModuleType): + return {'__class__': 'Module', 'value': obj.__name__} elif hasattr(obj, '__getstate__') and not callable(obj): return {'__class__': 'StatefulObject', '__type__': obj.__class__.__name__, @@ -196,6 +225,7 @@ def decode4js(obj): """ if not isinstance(obj, dict): return obj + setup_larchtypes() out = obj classname = obj.pop('__class__', None) if classname is None: @@ -240,6 +270,9 @@ def decode4js(obj): if obj['closed']: out.close() + elif classname == 'Module': + out = importlib.import_module(obj.__name__) + elif classname == 'Parameters': out = Parameters() out.clear() @@ -287,7 +320,20 @@ def decode4js(obj): pass # ignore class methods for 
subclassed Groups else: out[key] = decode4js(val) - if classname == 'FeffDatFile': + if classname == 'Minimizer': + userfunc = out.pop('userfcn') + params = out.pop('params') + kws = out.pop('kws') + for kname in ('scale_covar', 'max_nfev', 'nan_policy'): + kws[kname] = out.pop(kname) + mini = Minimizer(userfunc, params, **kws) + for kname in ('success', 'nfev', 'nfree', 'ndata', 'ier', + 'errorbars', 'message', 'lmdif_message', 'chisqr', + 'redchi', 'covar', 'userkws', 'userargs', 'result'): + setattr(mini, kname, out.pop(kname)) + out = mini + elif classname == 'FeffDatFile': + from larch.xafs import FeffDatFile path = FeffDatFile() path._set_from_dict(**out) out = path @@ -300,5 +346,5 @@ def decode4js(obj): out = SCIPY_FUNCTIONS.get(mname, None) else: - print("cannot decode ", classname, obj) + print("cannot decode ", classname, repr(obj)[:100]) return out diff --git a/larch/utils/strutils.py b/larch/utils/strutils.py index beb2c4417..50651556f 100644 --- a/larch/utils/strutils.py +++ b/larch/utils/strutils.py @@ -9,7 +9,9 @@ import uuid import hashlib from base64 import b64encode, b32encode -import random +from random import Random + +rng = Random() from packaging import version as pkg_version @@ -257,12 +259,16 @@ def get_sessionid(): return out.replace('/', '-').replace('+', '=') -def random_varname(n): +def random_varname(n, rng_seed=None): L = 'abcdefghijklmnopqrstuvwxyz0123456789' - return random.choice(L[:26]) + ''.join([random.choice(L) for _ in range(n-1)]) + + global rng + if rng_seed is None: + rng.seed(rng_seed) + return rng.choice(L[:26]) + ''.join([random.choice(L) for _ in range(n-1)]) -def file2groupname(filename, slen=9, minlen=2, symtable=None): +def file2groupname(filename, slen=9, minlen=2, symtable=None, rng_seed=None): """create a group name based of filename the group name will have a string component of length slen followed by a 2 digit number @@ -274,11 +280,14 @@ def file2groupname(filename, slen=9, minlen=2, symtable=None): symtable 
(None or larch symbol table) symbol table for checking that the group name is unique """ + global rng + if rng_seed is None: + rng.seed(rng_seed) gname = fix_varname(filename).lower().replace('_', '') if gname[0] not in 'abcdefghijklmnopqrstuvwxyz': - gname = random.choice(['a', 'b', 'c', 'd', 'e', 'f', 'g']) + gname + gname = rng.choice(['a', 'b', 'c', 'd', 'e', 'f', 'g']) + gname if len(gname) < minlen: gname = gname + random_varname(minlen-len(gname)) diff --git a/larch/wxlib/columnframe.py b/larch/wxlib/columnframe.py index f0c9f0830..8cbb27e25 100644 --- a/larch/wxlib/columnframe.py +++ b/larch/wxlib/columnframe.py @@ -588,8 +588,8 @@ def __init__(self, parent, filename=None, groupname=None, config=None, ypop='', monod=3.1355316, en_units=en_units, yerr_op='constant', yerr_val=1, yerr_arr=None, yrpop='', yrop='/', yref1='', yref2='', - is_trans=False, has_yref=False, dtc_config={}, multicol_config={}) + # print(" READ COL FILE xxx ", config) if config is not None: self.config.update(config) dtype = config.get('datatype', None) @@ -616,6 +616,8 @@ def __init__(self, parent, filename=None, groupname=None, config=None, if self.config['yref2'] is None and 'i1' in self.array_labels: self.config['yref2'] = 'i1' + use_trans = self.config.get('is_trans', False) or 'log' in self.config['ypop'] + message = "Data Columns for %s" % group.filename wx.Frame.__init__(self, None, -1, 'Build Arrays from Data Columns for %s' % group.filename, @@ -657,8 +659,7 @@ def subtitle(s, fontsize=12, colour=wx.Colour(10, 10, 180)): self.yerr_op.SetSelection(0) self.is_trans = Check(panel, label='is transmission data?', - default=self.config['is_trans'], - action=self.onTransCheck) + default=use_trans, action=self.onTransCheck) self.yerr_val = FloatCtrl(panel, value=1, precision=4, size=(75, -1)) self.monod_val = FloatCtrl(panel, value=3.1355316, precision=7, size=(75, -1)) @@ -683,6 +684,7 @@ def subtitle(s, fontsize=12, colour=wx.Colour(10, 10, 180)): self.yrop = Choice(panel, 
choices=ARR_OPS, action=self.onUpdate, size=(100, -1)) self.ysuf = SimpleText(panel, '') + # print("COL FILE READER set ypop to ", use_trans, self.config['ypop']) self.ypop.SetStringSelection(self.config['ypop']) self.yop.SetStringSelection(self.config['yop']) self.yrpop.SetStringSelection(self.config['yrpop']) @@ -696,6 +698,7 @@ def subtitle(s, fontsize=12, colour=wx.Colour(10, 10, 180)): if '(' in self.config['ypop']: self.ysuf.SetLabel(')') + ixsel, iysel = 0, 1 iy2sel = iyesel = iyr1sel = iyr2sel = len(yarr_labels)-1 if self.config['xarr'] in xarr_labels: @@ -729,7 +732,7 @@ def subtitle(s, fontsize=12, colour=wx.Colour(10, 10, 180)): self.wid_refgroupname = wx.TextCtrl(panel, value=group.groupname + '_ref', size=(150, -1)) - self.onTransCheck(is_trans=self.config['is_trans']) + self.onTransCheck(is_trans=use_trans) self.onYrefCheck(has_yref=self.config['has_yref']) @@ -1234,6 +1237,10 @@ def read_form(self, **kws): if datatype == 'raw': self.en_units.SetStringSelection('not energy') + ypop = self.ypop.GetStringSelection().strip() + self.is_trans.SetValue('log' in ypop) + + conf = {'datatype': datatype, 'ix': self.xarr.GetSelection(), 'xarr': self.xarr.GetStringSelection(), @@ -1244,7 +1251,7 @@ def read_form(self, **kws): 'iy1': self.yarr1.GetSelection(), 'iy2': self.yarr2.GetSelection(), 'yop': self.yop.GetStringSelection().strip(), - 'ypop': self.ypop.GetStringSelection().strip(), + 'ypop': ypop, 'iyerr': self.yerr_arr.GetSelection(), 'yerr_arr': self.yerr_arr.GetStringSelection(), 'yerr_op': self.yerr_op.GetStringSelection().lower(), @@ -1313,7 +1320,6 @@ def create_arrays(dgroup, datatype='xas', ix=0, xarr='energy', en_units='eV', ncol, npts = dgroup.data.shape exprs = dict(xdat=None, ydat=None, yerr=None, yref=None) - # print("CREATE ARRAYS ", dgroup, datatype, ncol, npts) if not hasattr(dgroup, 'index'): dgroup.index = 1.0*np.arange(npts) diff --git a/larch/wxlib/plotter.py b/larch/wxlib/plotter.py index d1573db14..e02a7268a 100644 --- 
a/larch/wxlib/plotter.py +++ b/larch/wxlib/plotter.py @@ -306,7 +306,8 @@ def get_display(win=1, _larch=None, wxparent=None, size=None, if wintitle is not None: title = wintitle - def _get_disp(symname, creator, win, ddict, wxparent, size, height, width, _larch): + def _get_disp(symname, creator, win, ddict, wxparent, + size, height, width, _larch): wxapp = get_wxapp() display = None new_display = False @@ -342,9 +343,6 @@ def _get_disp(symname, creator, win, ddict, wxparent, size, height, width, _larc xsiz, ysiz = parent.GetSize() x = xpos + xsiz*0.75 y = ypos + ysiz*0.75 - dlims = DISPLAY_LIMITS - if dlims is None: - dlims = [0, 5000, 0, 5000] if len(PLOT_DISPLAYS) > 0: try: xpos, ypos = PLOT_DISPLAYS[1].GetPosition() @@ -352,8 +350,12 @@ def _get_disp(symname, creator, win, ddict, wxparent, size, height, width, _larc except: pass off = 0.20*(win-1) - x = max(20, xpos + xsiz*off) - y = max(20, ypos + ysiz*off) + x = max(25, xpos + xsiz*off) + y = max(25, ypos + ysiz*off) + global DISPLAY_LIMITS + dlims = DISPLAY_LIMITS + if dlims is None: + dlims = [0, 5000, 0, 5000] if y+0.75*ysiz > dlims[3]: y = 40+max(40, 40+ysiz*(off-0.5)) if x+0.75*xsiz > dlims[1]: @@ -476,7 +478,8 @@ def _xrf_oplot(x=None, y=None, mca=None, win=1, _larch=None, **kws): _xrf_plot(x=x, y=y, mca=mca, win=win, _larch=_larch, new=False, **kws) def _plot(x,y, win=1, new=False, _larch=None, wxparent=None, size=None, - xrf=False, stacked=False, force_draw=True, side='left', wintitle=None, **kws): + xrf=False, stacked=False, force_draw=True, side='left', + wintitle=None, **kws): """plot(x, y[, win=1], options]) Plot 2-D trace of x, y arrays in a Plot Frame, clearing any plot currently in the Plot Frame. 
@@ -515,7 +518,7 @@ def _plot(x,y, win=1, new=False, _larch=None, wxparent=None, size=None, """ plotter = get_display(wxparent=wxparent, win=win, size=size, xrf=xrf, stacked=stacked, - wintitle=wintitle, _larch=_larch) + wintitle=wintitle, _larch=_larch) if plotter is None: return plotter.Raise() diff --git a/larch/wxlib/xafsplots.py b/larch/wxlib/xafsplots.py index 1613dcc99..4611d552c 100644 --- a/larch/wxlib/xafsplots.py +++ b/larch/wxlib/xafsplots.py @@ -284,7 +284,7 @@ def plot_mu(dgroup, show_norm=False, show_flat=False, show_deriv=False, redraw(win=win, xmin=emin, xmax=emax, _larch=_larch) #enddef -def plot_bkg(dgroup, norm=True, emin=None, emax=None, show_e0=False, +def plot_bkg(dgroup, norm=True, emin=None, emax=None, show_e0=False, show_ek0=False, label=None, title=None, new=True, delay_draw=False, offset=0, win=1, _larch=None): """ @@ -296,9 +296,10 @@ def plot_bkg(dgroup, norm=True, emin=None, emax=None, show_e0=False, ---------- dgroup group of XAFS data after autobk() results (see Note 1) norm bool whether to show normalized data [True] - emin min energy to show, absolute or relative to E0 [None, start of data] - emax max energy to show, absolute or relative to E0 [None, end of data] + emin min energy to show, absolute or relative to E0 [None, start of data] + emax max energy to show, absolute or relative to E0 [None, end of data] show_e0 bool whether to show E0 [False] + show_ek0 bool whether to show EK0 [False] label string for label [``None``: 'mu'] title string for plot titlte [None, may use filename if available] new bool whether to start a new plot [True] @@ -344,9 +345,21 @@ def plot_bkg(dgroup, norm=True, emin=None, emax=None, show_e0=False, ymin, ymax = xylims[2], xylims[3] _plot(dgroup.energy, bkg+offset, zorder=18, label='bkg', **opts) - if show_e0: - _plot_axvline(dgroup.e0, zorder=2, size=3, label='E0', - color=plotlabels.e0color, win=win, _larch=_larch) + e0val, e0label = None, 'E0' + if show_e0 and hasattr(dgroup, 'e0'): + e0val = 
dgroup.e0 + elif show_ek0 and hasattr(dgroup, 'ek0'): + e0val, e0label = dgroup.ek0, 'EK0' + + if e0val is not None: + ie0 = index_of(dgroup.energy, e0val) + ee0 = dgroup.energy[ie0] + me0 = mu[ie0] + offset + disp.panel.axes.plot([ee0], [me0], marker='o', + markersize=5, label='_nolegend_', + markerfacecolor='#808080', + markeredgecolor='#A03030') + if disp is not None: disp.panel.conf.draw_legend() #endif diff --git a/larch/wxmap/mapviewer.py b/larch/wxmap/mapviewer.py index 11bc6aafc..f66f8d390 100644 --- a/larch/wxmap/mapviewer.py +++ b/larch/wxmap/mapviewer.py @@ -1328,9 +1328,9 @@ class MapViewerFrame(wx.Frame): cursor_menulabels = {'lasso': ('Select Points for XRF Spectra\tCtrl+X', 'Left-Drag to select points for XRF Spectra')} - def __init__(self, parent=None, filename=None, _larch=None, - use_scandb=False, check_version=True, - size=(925, 650), **kwds): + def __init__(self, parent=None, filename=None, _larch=None, title=None, + use_scandb=False, check_version=True, size=(925, 650), + **kwds): if check_version: def check_version(): @@ -1366,7 +1366,9 @@ def check_version(): self.dtcor = True self.showxrd = False - self.SetTitle('GSE XRM MapViewer') + if title is None: + title = "XRF Map Viewing and Analysis" + self.SetTitle(title) self.createMainPanel() self.SetFont(Font(FONTSIZE)) @@ -2747,8 +2749,10 @@ def onBROWSEfile(self,event=None,i=1): class MapViewer(LarchWxApp): def __init__(self, use_scandb=False, _larch=None, filename=None, - check_version=True, with_inspect=False, **kws): + title=None, check_version=True, with_inspect=False, **kws): + self.filename = filename + self.title = title self.use_scandb = use_scandb self.check_version = check_version LarchWxApp.__init__(self, _larch=_larch, @@ -2756,6 +2760,7 @@ def __init__(self, use_scandb=False, _larch=None, filename=None, def createApp(self): frame = MapViewerFrame(use_scandb=self.use_scandb, + title=self.title, filename=self.filename, check_version=self.check_version, _larch=self._larch) diff --git 
a/larch/wxxas/config.py b/larch/wxxas/config.py index 81fbe7d57..9de83fb24 100644 --- a/larch/wxxas/config.py +++ b/larch/wxxas/config.py @@ -63,7 +63,7 @@ def make_array_choice(opts): PlotWindowChoices = ['1', '2', '3', '4', '5', '6', '7', '8', '9'] -NNORM_CHOICES = {'auto':None, 'constant':0, 'linear':1, 'quadratic':2, 'cubic':3} +NNORM_CHOICES = {'constant':0, 'linear':1, 'quadratic':2, 'cubic':3} NORM_METHODS = ('polynomial', 'mback') ATHENA_CLAMPNAMES = {'none': 0, 'slight': 1, 'weak': 5, 'medium': 20, @@ -230,6 +230,7 @@ def __repr__(self): ] xasnorm = [CVar('auto_e0', True, 'bool', desc='whether to automatically set E0'), + CVar('auto_nnorm', True, 'bool', desc='whether to automatically set normalization polynomial'), CVar('auto_step', True, 'bool', desc='whether to automatically set edge step'), CVar('show_e0', True, 'bool', desc='whether to show E0'), CVar('energy_shift', 0., 'float', desc='value of Energy shift from original data'), @@ -240,7 +241,7 @@ def __repr__(self): CVar('nvict', 0, 'int', min=0, max=3, desc='Victoreen order for pre-edge fitting\n(Energy^(-nvict))'), CVar('show_pre', False, 'bool', desc='whether to show pre-edge energy range (pre1, pre2)'), CVar('norm_method', 'polynomial', 'choice', choices=NORM_METHODS, desc='normalization method'), - CVar('nnorm', 'auto', 'choice', choices=list(NNORM_CHOICES.keys()), + CVar('nnorm', 'linear', 'choice', choices=list(NNORM_CHOICES.keys()), desc='type of polynomial for normalization'), CVar('norm1', 150, 'float', step=5, desc='low-energy fit range for normalization curve,\nrelative to E0'), CVar('norm2', -1, 'float', step=5, desc='high-energy fit range for normalization curve,\nelative to E0 (set to -1 for "auto")'), diff --git a/larch/wxxas/exafs_panel.py b/larch/wxxas/exafs_panel.py index 2d743fbb0..0c7699932 100644 --- a/larch/wxxas/exafs_panel.py +++ b/larch/wxxas/exafs_panel.py @@ -13,7 +13,7 @@ from larch.math import index_of from larch.wxlib import (BitmapButton, FloatCtrl, FloatSpin, 
ToggleButton, get_icon, SimpleText, pack, Button, HLine, Choice, - plotlabels, Check, CEN, RIGHT, LEFT) + TextCtrl, plotlabels, Check, CEN, RIGHT, LEFT) from larch.xafs.xafsutils import etok, ktoe, FT_WINDOWS from larch.xafs.pre_edge import find_e0 @@ -41,10 +41,10 @@ PlotOne_Choices = [mu_bkg, chie, chik, chikwin, chirmag, chirre, chirmr, wavelet, chir_w, chiq, chikq] PlotAlt_Choices = [noplot] + PlotOne_Choices -PlotSel_Choices = [chie, chik, chirmag, chirre] +PlotSel_Choices = [chie, chik, chirmag, chirre, chiq] -PlotCmds = {mu_bkg: "plot_bkg({group:s}", +PlotCmds = {mu_bkg: "plot_bkg({group:s}, show_ek0={show_ek0}", chie: "plot_chie({group:s}", chik: "plot_chik({group:s}, show_window=False, kweight={plot_kweight:.0f}", chikwin: "plot_chik({group:s}, show_window=True, kweight={plot_kweight:.0f}", @@ -124,14 +124,37 @@ def build_display(self): action=self.onProcess) wids['plot_win'].SetStringSelection('2') + ek0_panel = wx.Panel(panel) opts = dict(digits=2, increment=0.1, min_val=0, action=self.onProcess) - wids['ek0'] = FloatSpin(panel, **opts) - - wids['push_e0'] = Button(panel, 'Use as Normalization E0', size=(200, -1), + wids['ek0'] = FloatSpin(ek0_panel, **opts) + wids['show_ek0'] = Check(ek0_panel, default=True, label='show?', + action=self.onShowEk0) + sx = wx.BoxSizer(wx.HORIZONTAL) + sx.Add(self.wids['ek0'], 0, LEFT, 4) + sx.Add(self.wids['show_ek0'], 0, LEFT, 4) + pack(ek0_panel, sx) + + wids['push_e0'] = Button(panel, 'Use as Normalization E0', size=(180, -1), action=self.onPushE0) wids['push_e0'].SetToolTip('Use this value for E0 in the Normalization Tab') + # + wids['plotopt_name'] = TextCtrl(panel, 'kspace, kw=2', size=(150, -1), + action=self.onPlotOptSave, + act_on_losefocus=False) + wids['plotopt_name'].SetToolTip('Name this set of Plot Choices') + + self.plotopt_saves = {'kspace, kw=2': {'plotone_op': chik, 'plotsel_op': chik, + 'plotalt_op': noplot, 'plot_voffset': 0.0, + 'plot_kweight': 2, 'plot_kweight_alt': 2, + 'plot_rmax': 8}} + + 
wids['plotopt_sel'] = Choice(panel, size=(150, -1), + choices=list(self.plotopt_saves.keys()), + action=self.onPlotOptSel) + + opts['max_val'] = 6 opts['action'] = self.onRbkg wids['rbkg'] = FloatSpin(panel, value=1.0, **opts) @@ -190,22 +213,29 @@ def CopyBtn(name): panel.Add(self.wids['plotsel_op'], dcol=2) add_text('Vertical offset: ', newrow=False) - panel.Add(wids['plot_voffset'], dcol=2) + panel.Add(wids['plot_voffset'], style=RIGHT) panel.Add(plot_one, newrow=True) panel.Add(self.wids['plotone_op'], dcol=2) add_text('Plot k weight: ', newrow=False) - panel.Add(wids['plot_kweight']) + panel.Add(wids['plot_kweight'], style=RIGHT) add_text('Add Second Plot: ', newrow=True) panel.Add(self.wids['plotalt_op'], dcol=2) add_text('Plot2 k weight: ', newrow=False) - panel.Add(wids['plot_kweight_alt']) + panel.Add(wids['plot_kweight_alt'], style=RIGHT) add_text('Window for Second Plot: ', newrow=True) panel.Add(self.wids['plot_win'], dcol=2) add_text('Plot R max: ', newrow=False) - panel.Add(wids['plot_rmax']) + panel.Add(wids['plot_rmax'], style=RIGHT) + + add_text('Save Plot Options as: ', newrow=True) + panel.Add(self.wids['plotopt_name'], dcol=2) + + add_text('Use Saved Plot Options: ', dcol=1, newrow=False) + panel.Add(self.wids['plotopt_sel'], dcol=1) + panel.Add(HLine(panel, size=(500, 3)), dcol=6, newrow=True) @@ -216,8 +246,8 @@ def CopyBtn(name): add_text('E k=0: ') - panel.Add(wids['ek0']) - panel.Add(wids['push_e0'], dcol=2) + panel.Add(ek0_panel, dcol=2) + panel.Add(wids['push_e0'], dcol=1) panel.Add(CopyBtn('ek0'), style=RIGHT) add_text('R bkg: ') @@ -392,11 +422,15 @@ def fill_form(self, dgroup): for attr in ('bkg_kmin', 'bkg_kmax', 'bkg_kweight', 'fft_kmin', 'fft_kmax', 'fft_kweight', 'fft_dk', 'fft_rmaxout', - 'plot_rmax'): + ): try: wids[attr].SetValue(float(opts.get(attr))) except: pass + for attr in ('fft_kwindow',): + if attr in opts: + wids[attr].SetStringSelection(opts[attr]) + for attr in ('bkg_clamplo', 'bkg_clamphi'): val = opts.get(attr, 0) 
@@ -408,11 +442,7 @@ def fill_form(self, dgroup): try: wids[attr].SetStringSelection("%d" % int(val)) except: - print(f"could not set '{attr:s}' to {val}") - - for attr in ('fft_kwindow', 'plotone_op', 'plotalt_op'): # 'plotsel_op', - if attr in opts: - wids[attr].SetStringSelection(opts[attr]) + pass frozen = opts.get('is_frozen', False) if hasattr(dgroup, 'is_frozen'): @@ -450,6 +480,8 @@ def read_form(self, dgroup=None, as_copy=False): for attr in ('fft_kwindow', 'fft_rwindow', 'plotone_op', 'plotsel_op', 'plotalt_op'): conf[attr] = wids[attr].GetStringSelection() + conf['show_ek0'] = wids['show_ek0'].IsChecked() + time.sleep(0.001) self.skip_process = skip_save if as_copy: @@ -461,6 +493,10 @@ def read_form(self, dgroup=None, as_copy=False): def onSaveConfigBtn(self, evt=None): self.set_defaultconfig(self.read_form()) + def onShowEk0(self, evt=None): + print("show ek0 ", evt) + + def onPushE0(self, evt=None): conf = self.read_form() dgroup = self.controller.get_group() @@ -604,6 +640,40 @@ def plot(self, dgroup=None): return self.onPlotOne(dgroup=dgroup) + + def onPlotOptSave(self, name=None, event=None): + data = {} + if name is None or len(name) < 1: + name = f"view {len(self.plotopt_saves)+1}" + + name = name.strip() + for attr in ('plot_voffset', 'plot_kweight', + 'plot_kweight_alt', 'plot_rmax'): + data[attr] = self.wids[attr].GetValue() + + for attr in ('plotone_op', 'plotsel_op', 'plotalt_op'): + data[attr] = self.wids[attr].GetStringSelection() + self.plotopt_saves[name] = data + + choices = list(reversed(self.plotopt_saves.keys())) + self.wids['plotopt_sel'].SetChoices(choices) + self.wids['plotopt_sel'].SetSelection(0) + + + def onPlotOptSel(self, event=None): + name = event.GetString() + data = self.plotopt_saves.get(name, None) + if data is not None: + for attr in ('plot_voffset', 'plot_kweight', + 'plot_kweight_alt', 'plot_rmax'): + self.wids[attr].SetValue(data[attr]) + + for attr in ('plotone_op', 'plotsel_op', 'plotalt_op'): + 
self.wids[attr].SetStringSelection(data[attr]) + + self.plot() + + def onPlotOne(self, evt=None, dgroup=None): if self.skip_plotting: return @@ -614,6 +684,7 @@ def onPlotOne(self, evt=None, dgroup=None): self.process(dgroup=self.dgroup) conf['title'] = '"%s"' % self.dgroup.filename + # print(" onPlotOne ", conf['plotone_op']) cmd = PlotCmds[conf['plotone_op']] + ", win=1, title={title:s})" # 2nd plot cmd2 = PlotCmds[conf['plotalt_op']] @@ -623,6 +694,7 @@ def onPlotOne(self, evt=None, dgroup=None): cmd = "%s\n%s" % (cmd, cmd2) self.controller.get_display(win=2) + # print(" onPlotOne ", cmd.format(**conf)) self.larch_eval(cmd.format(**conf)) self.last_plot = 'one' diff --git a/larch/wxxas/feffit_panel.py b/larch/wxxas/feffit_panel.py index 144c39895..48ff0ba61 100644 --- a/larch/wxxas/feffit_panel.py +++ b/larch/wxxas/feffit_panel.py @@ -14,7 +14,6 @@ import numpy as np np.seterr(all='ignore') - import wx import wx.lib.scrolledpanel as scrolled @@ -56,10 +55,10 @@ # PlotOne_Choices = [chik, chirmag, chirre, chirmr] -PlotOne_Choices = make_array_choice(['chi','chir_mag', 'chir_re', 'chir_mag+chir_re', 'chiq']) -PlotAlt_Choices = make_array_choice(['noplot', 'chi','chir_mag', 'chir_re', 'chir_mag+chir_re']) +Plot1_Choices = make_array_choice(['chi','chir_mag', 'chir_re', 'chir_mag+chir_re', 'chiq']) +Plot2_Choices = make_array_choice(['noplot', 'chi','chir_mag', 'chir_re', 'chir_mag+chir_re', 'chiq']) -# PlotAlt_Choices = [noplot] + PlotOne_Choices +# Plot2_Choices = [noplot] + Plot1_Choices ScriptWcards = "Fit Models(*.lar)|*.lar|All files (*.*)|*.*" @@ -129,7 +128,10 @@ """ COMMANDS['ff2chi'] = """# sum paths using a list of paths and a group of parameters -_pathsum = ff2chi({paths:s}, paramgroup=_feffit_params) +_feffit_dataset = feffit_dataset(data={groupname:s}, transform={trans:s}, + refine_bkg={refine_bkg}, + paths={paths:s}) +_feffit_dataset.model = ff2chi({paths:s}, paramgroup=_feffit_params) """ COMMANDS['do_feffit'] = """# build feffit dataset, run feffit 
@@ -778,7 +780,7 @@ def build_display(self): fit_rmax = self.add_floatspin('fit_rmax', value=5, **fsopts) fit_rmin = self.add_floatspin('fit_rmin', value=1, action=self.onRmin, **fsopts) - wids['fit_kwstring'] = Choice(pan, size=(150, -1), + wids['fit_kwstring'] = Choice(pan, size=(120, -1), choices=list(Feffit_KWChoices.keys())) wids['fit_kwstring'].SetSelection(1) @@ -787,36 +789,49 @@ def build_display(self): wids['fit_space'] = Choice(pan, choices=list(Feffit_SpaceChoices.keys()), size=(150, -1)) - wids['plotone_op'] = Choice(pan, choices=list(PlotOne_Choices.keys()), + wids['plot_kw'] = Choice(pan, size=(80, -1), + choices=['0', '1', '2', '3', '4'], default=2) + + wids['plot1_op'] = Choice(pan, choices=list(Plot1_Choices.keys()), action=self.onPlot, size=(150, -1)) - wids['plotone_op'].SetSelection(1) - wids['plotalt_op'] = Choice(pan, choices=list(PlotAlt_Choices.keys()), + wids['plot1_op'].SetSelection(1) + + wids['plot1_voff'] = FloatSpin(pan, value=0, digits=2, increment=0.25, + size=(100, -1), action=self.onPlot) + + wids['plot1_paths'] = Check(pan, default=False, label='Plot Each Path', + action=self.onPlot) + wids['plot1_ftwins'] = Check(pan, default=False, label='Plot FT Windows', + action=self.onPlot) + + + wids['plot2_win'] = Choice(pan, choices=PlotWindowChoices, + action=self.onPlot, size=(55, -1)) + wids['plot2_win'].SetStringSelection('2') + wids['plot2_win'].SetToolTip('Plot Window for Second Plot') + + wids['plot2_op'] = Choice(pan, choices=list(Plot2_Choices.keys()), action=self.onPlot, size=(150, -1)) - wids['plot_win'] = Choice(pan, choices=PlotWindowChoices, - action=self.onPlot, size=(60, -1)) - wids['plot_win'].SetStringSelection('2') - wids['plot_voffset'] = FloatSpin(pan, value=0, digits=2, increment=0.25, + wids['plot2_voff'] = FloatSpin(pan, value=0, digits=2, increment=0.25, size=(100, -1), action=self.onPlot) - wids['plot_paths'] = Check(pan, default=False, label='Plot Each Path', + wids['plot2_paths'] = Check(pan, default=False, 
label='Plot Each Path', action=self.onPlot) - wids['plot_ftwindows'] = Check(pan, default=False, label='Plot FT Windows', + wids['plot2_ftwins'] = Check(pan, default=False, label='Plot FT Windows', action=self.onPlot) - wids['refine_bkg'] = Check(pan, default=False, - label='Refine Background during Fit?') wids['plot_current'] = Button(pan,'Plot Current Model', action=self.onPlot, size=(175, -1)) + + wids['refine_bkg'] = Check(pan, default=False, + label='Refine Background during Fit?') wids['do_fit'] = Button(pan, 'Fit Data to Model', action=self.onFitModel, size=(175, -1)) wids['show_results'] = Button(pan, 'Show Fit Results', action=self.onShowResults, size=(175, -1)) wids['show_results'].Disable() -# wids['do_fit_sel']= Button(pan, 'Fit Selected Groups', -# action=self.onFitSelected, size=(125, -1)) -# wids['do_fit_sel'].Disable() def add_text(text, dcol=1, newrow=True): pan.Add(SimpleText(pan, text), dcol=dcol, newrow=newrow) @@ -829,7 +844,7 @@ def add_text(text, dcol=1, newrow=True): pan.Add(wids['fit_space']) add_text('k weightings: ', newrow=False) - pan.Add(wids['fit_kwstring']) + pan.Add(wids['fit_kwstring'], dcol=3) add_text('k min: ') pan.Add(fit_kmin) @@ -851,18 +866,27 @@ def add_text(text, dcol=1, newrow=True): pan.Add(HLine(pan, size=(600, 2)), dcol=6, newrow=True) pan.Add(wids['plot_current'], dcol=1, newrow=True) - pan.Add(wids['plotone_op'], dcol=1) + pan.Add(wids['plot1_op'], dcol=1) + add_text('k-weight:' , newrow=False) + pan.Add(wids['plot_kw'], dcol=1) - pan.Add(wids['plot_paths'], newrow=True) - pan.Add(wids['plot_ftwindows']) - # pan.Add(ppanel, dcol=2, newrow=True) + pan.Add(wids['plot1_ftwins'], newrow=True) + pan.Add(wids['plot1_paths']) add_text('Vertical Offset' , newrow=False) - pan.Add(wids['plot_voffset']) + pan.Add(wids['plot1_voff']) + + + add_text('Add Second Plot: ') + + pan.Add(wids['plot2_op'], dcol=1) + add_text('Plot Window:' , newrow=False) + pan.Add(wids['plot2_win']) + + pan.Add(wids['plot2_ftwins'], newrow=True) + 
pan.Add(wids['plot2_paths']) + add_text('Vertical Offset' , newrow=False) + pan.Add(wids['plot2_voff']) - add_text('Second Plot: ', newrow=True) - pan.Add(wids['plotalt_op'], dcol=1) - add_text('Plot Window: ', newrow=False) - pan.Add(wids['plot_win'], dcol=1) pan.Add(wids['do_fit'], dcol=1, newrow=True) pan.Add(wids['show_results']) @@ -934,7 +958,7 @@ def get_config(self, dgroup=None): econf = getattr(dgroup.config, 'exafs', {}) for key in ('fit_kmin', 'fit_kmax', 'fit_dk', - 'fit_rmin', 'fit_rmax', 'fit_dr' + 'fit_rmin', 'fit_rmax', 'fit_dr', 'fit_kwindow', 'fit_rwindow'): alt = key.replace('fit', 'fft') val = conf.get(key, -1) @@ -966,7 +990,7 @@ def process(self, dgroup=None, **kws): for attr in ('fit_kmin', 'fit_kmax', 'fit_dk', 'fit_rmin', 'fit_rmax', 'fit_kwindow', 'fit_rwindow', 'fit_dr', 'fit_kwstring', 'fit_space', - 'fit_plot', 'plot_paths'): + 'fit_plot', 'plot1_paths'): conf[attr] = opts.get(attr, None) @@ -1028,13 +1052,17 @@ def read_form(self, dgroup=None): form_opts['fit_space'] = Feffit_SpaceChoices[fitspace_string] form_opts['fit_kwindow'] = wids['fit_kwindow'].GetStringSelection() - form_opts['plot_ftwindows'] = wids['plot_ftwindows'].IsChecked() - form_opts['plot_paths'] = wids['plot_paths'].IsChecked() - form_opts['plotone_op'] = PlotOne_Choices[wids['plotone_op'].GetStringSelection()] - form_opts['plotalt_op'] = PlotAlt_Choices[wids['plotalt_op'].GetStringSelection()] - form_opts['plot_voffset'] = wids['plot_voffset'].GetValue() - form_opts['plot_win'] = int(wids['plot_win'].GetStringSelection()) - + form_opts['plot_kw'] = int(wids['plot_kw'].GetStringSelection()) + form_opts['plot1_ftwins'] = wids['plot1_ftwins'].IsChecked() + form_opts['plot1_paths'] = wids['plot1_paths'].IsChecked() + form_opts['plot1_op'] = Plot1_Choices[wids['plot1_op'].GetStringSelection()] + form_opts['plot1_voff'] = wids['plot1_voff'].GetValue() + + form_opts['plot2_op'] = Plot2_Choices[wids['plot2_op'].GetStringSelection()] + form_opts['plot2_ftwins'] = 
wids['plot2_ftwins'].IsChecked() + form_opts['plot2_paths'] = wids['plot2_paths'].IsChecked() + form_opts['plot2_voff'] = wids['plot2_voff'].GetValue() + form_opts['plot2_win'] = int(wids['plot2_win'].GetStringSelection()) return form_opts @@ -1056,13 +1084,44 @@ def fill_model_params(self, prefix, params): if wids.vary is not None: wids.vary.SetStringSelection(varstr) - def onPlot(self, evt=None, dgroup=None, pargroup_name='_feffit_params', - paths_name='_feffpaths', pathsum_name='_pathsum', title=None, - dataset_name=None, build_fitmodel=True, topwin=None, **kws): - # feffit plot + def onPlot(self, evt=None, dataset_name='_feffit_dataset', + pargroup_name='_feffit_params', title=None, build_fitmodel=True, + topwin=None, **kws): + + dataset = getattr(self.larch.symtable, dataset_name, None) + if dataset is None: + dgroup = self.controller.get_group() + else: + dgroup = dataset.data + self.process(dgroup) opts = self.read_form(dgroup=dgroup) opts.update(**kws) + + if build_fitmodel: + self.build_fitmodel(dgroup) + + dataset = self.larch.eval(dataset_name) + if dataset is None: + print("could not get dataset : ", dataset_name) + return + + model_name = dataset_name + '.model' + paths_name = dataset_name + '.paths' + paths = self.larch.eval(paths_name) + + data_name = dataset_name + '.data' + refine_bkg = getattr(dataset, 'refine_bkg', + opts.get('refine_bkg', False)) + + # print("REFINE BKG ", + # getattr(dataset, 'refine_bkg', None), + # opts.get('refine_bkg', None), + # hasattr(dataset, 'data_rebkg')) + + if refine_bkg and hasattr(dataset, 'data_rebkg'): + data_name = dataset_name + '.data_rebkg' + fname = opts['filename'] if title is None: title = fname @@ -1071,69 +1130,49 @@ def onPlot(self, evt=None, dgroup=None, pargroup_name='_feffit_params', if "'" in title: title = title.replace("'", "\\'") - gname = opts['groupname'] - if dataset_name is None: - dataset_name = gname - - if dgroup is None: - dgroup = opts['datagroup'] - exafs_conf = 
self.xasmain.get_nbpage('exafs')[1].read_form() plot_rmax = exafs_conf['plot_rmax'] - if build_fitmodel: - self.build_fitmodel(dgroup) + plot1 = opts['plot1_op'] + plot2 = opts['plot2_op'] - try: - pathsum = self._plain_larch_eval(pathsum_name) - except: - pathsum = None + cmds = ["#### plot ", + f"# build arrays for plotting: refine bkg? {refine_bkg}, {dgroup.groupname} / {dataset_name}"] - try: - paths = self._plain_larch_eval(paths_name) - except: - paths = {} - - plot1 = opts['plotone_op'] - plot2 = opts['plotalt_op'] - cmds = [] - - kw = opts['fit_kweight'] + kweight = opts['plot_kw'] ftargs = dict(kmin=opts['fit_kmin'], kmax=opts['fit_kmax'], dk=opts['fit_dk'], - kwindow=opts['fit_kwindow'], kweight=opts['fit_kweight'], + kwindow=opts['fit_kwindow'], kweight=kweight, rmin=opts['fit_rmin'], rmax=opts['fit_rmax'], dr=opts.get('fit_dr', 0.1), rwindow='hanning') - if pathsum is not None: - cmds.append(COMMANDS['xft'].format(groupname=pathsum_name, **ftargs)) - if dataset_name is not None: - cmds.append(COMMANDS['xft'].format(groupname=dataset_name, **ftargs)) - if dgroup is not None: - cmds.append(COMMANDS['xft'].format(groupname=gname, **ftargs)) - if opts['plot_paths']: + if model_name is not None: + cmds.append(COMMANDS['xft'].format(groupname=model_name, **ftargs)) + if data_name is not None: + cmds.append(COMMANDS['xft'].format(groupname=data_name, **ftargs)) + + if opts['plot1_paths'] or opts['plot2_paths']: cmds.append(COMMANDS['path2chi'].format(paths_name=paths_name, pargroup_name=pargroup_name, **ftargs)) self.larch_eval('\n'.join(cmds)) - with_win = opts['plot_ftwindows'] needs_qspace = False cmds = [] for i, plot in enumerate((plot1, plot2)): - if plot in PlotAlt_Choices: - plot = PlotAlt_Choices[plot] - - if plot in ('noplot', ''): - continue + if plot in Plot2_Choices: + plot = Plot2_Choices[plot] plotwin = 1 - if i > 0: - plotwin = int(opts.get('plot_win', '2')) + if i == 1: + if plot in ('noplot', ''): + continue + else: + plotwin = 
int(opts.get('plot2_win', '2')) pcmd = 'plot_chir' pextra = f', win={plotwin:d}' if plot == 'chi': pcmd = 'plot_chik' - pextra += f', kweight={kw:d}' + pextra += f', kweight={kweight:d}' elif plot == 'chir_mag': pcmd = 'plot_chir' pextra += f', rmax={plot_rmax}' @@ -1148,17 +1187,17 @@ def onPlot(self, evt=None, dgroup=None, pargroup_name='_feffit_params', else: print(" do not know how to plot ", plot) continue - + with_win = opts[f'plot{i+1}_ftwins'] newplot = f', show_window={with_win}, new=True' overplot = f', show_window=False, new=False' if dgroup is not None: - cmds.append(f"{pcmd}({dataset_name:s}, label='data'{pextra}, title='{title}'{newplot})") - if pathsum is not None: - cmds.append(f"{pcmd}({pathsum_name:s}, label='model'{pextra}{overplot})") - elif pathsum is not None: - cmds.append(f"{pcmd}({pathsum_name:s}, label='Path sum'{pextra}, title='sum of paths'{newplot})") - if opts['plot_paths']: - voff = opts['plot_voffset'] + cmds.append(f"{pcmd}({data_name:s}, label='data'{pextra}, title='{title}'{newplot})") + if dataset.model is not None: + cmds.append(f"{pcmd}({model_name:s}, label='model'{pextra}{overplot})") + elif dataset.model is not None: + cmds.append(f"{pcmd}({model_name:s}, label='Path sum'{pextra}, title='sum of paths'{newplot})") + if opts[f'plot{i+1}_paths']: + voff = opts[f'plot{i+1}_voff'] for i, label in enumerate(paths.keys()): if paths[label].use: @@ -1172,7 +1211,8 @@ def onPlot(self, evt=None, dgroup=None, pargroup_name='_feffit_params', cmds.append(f"{pcmd}({objname}, label='{label:s}'{pextra}, offset={(i+1)*voff}{overplot})") self.larch_eval('\n'.join(cmds)) - self.controller.set_focus(topwin=topwin) + if topwin is not None: + self.controller.set_focus(topwin=topwin) def reset_paths(self, event=None): @@ -1435,7 +1475,20 @@ def build_fitmodel(self, groupname=None): opts['paths'].append(pdat) paths_string = '[%s]' % (', '.join(paths_list)) - cmds.append(COMMANDS['ff2chi'].format(paths=paths_string)) + + +# _feffit_dataset = 
feffit_dataset(data={groupname:s}, transform={trans:s}, +# refine_bkg={refine_bkg}, +# paths={paths:s}) +# _feffit_dataset.model = ff2chi({paths:s}, paramgroup=_feffit_params) + + cmds.append(COMMANDS['ff2chi'].format(paths=paths_string, + trans='_feffit_trans', + groupname=opts['groupname'], + refine_bkg=opts['refine_bkg']) + ) + cmds.append('# end of build model') + self.larch_eval("\n".join(cmds)) return opts @@ -1522,8 +1575,7 @@ def onFitModel(self, event=None, dgroup=None): lab, fname, run = path['title'], path['fullpath'], path['feffrun'] amp, e0, delr, sigma2, third, ei = path['amp'], path['e0'], path['delr'], path['sigma2'], path['third'], path['ei'] script.append(f"""## Path '{lab}' : ############ -#_feffcache['paths']['{lab}'] = feffpath('{fname}', -# label='{lab}', feffrun='{run}', degen=1) +#_feffcache['paths']['{lab}'] = feffpath('{fname}', label='{lab}', feffrun='{run}', degen=1) #_feffpaths['{lab}'] = use_feffpath(_feffcache['paths'], '{lab}', # s02='{amp:s}', e0='{e0:s}', deltar='{delr:s}', # sigma2='{sigma2:s}', third='{third:s}', ei='{ei:s}')""") @@ -1532,10 +1584,10 @@ def onFitModel(self, event=None, dgroup=None): self.larch_eval(COMMANDS['do_feffit'].format(**fopts)) self.wids['show_results'].Enable() - self.onPlot(dgroup=opts['datagroup'], build_fitmodel=False, + self.onPlot(dataset_name='_feffit_dataset', pargroup_name='_feffit_result.paramgroup', - paths_name='_feffit_dataset.paths', - pathsum_name='_feffit_dataset.model') + build_fitmodel=False) + script.extend(self.get_session_history()[nstart:]) script.extend(["print(feffit_report(_feffit_result))", @@ -1669,32 +1721,33 @@ def build(self): minsize=(350, -1), colour=COLORS['title'], style=LEFT) - wids['plotone_op'] = Choice(panel, choices=list(PlotOne_Choices.keys()), + wids['plot1_op'] = Choice(panel, choices=list(Plot1_Choices.keys()), action=self.onPlot, size=(125, -1)) - wids['plotone_op'].SetSelection(1) - wids['plotalt_op'] = Choice(panel, choices=list(PlotAlt_Choices.keys()), + 
wids['plot1_op'].SetSelection(1) + wids['plot2_op'] = Choice(panel, choices=list(Plot2_Choices.keys()), action=self.onPlot, size=(125, -1)) - wids['plot_win'] = Choice(panel, choices=PlotWindowChoices, + wids['plot2_win'] = Choice(panel, choices=PlotWindowChoices, action=self.onPlot, size=(60, -1)) - wids['plot_win'].SetStringSelection('2') + wids['plot2_win'].SetStringSelection('2') - ppanel = wx.Panel(panel) - ppanel.SetMinSize((450, 20)) - wids['plot_paths'] = Check(ppanel, default=False, label='Plot Each Path', - action=self.onPlot) - wids['plot_ftwindows'] = Check(ppanel, default=False, label='Plot FT Windows', - action=self.onPlot) + wids['plot_kw'] = Choice(panel, size=(80, -1), + choices=['0', '1', '2', '3', '4'], default=2) - wids['plot_voffset'] = FloatSpin(ppanel, value=0, digits=2, increment=0.25, - action=self.onPlot, size=(100, -1)) + wids['plot1_paths'] = Check(panel, default=False, label='Plot Each Path', + action=self.onPlot) + wids['plot1_ftwins'] = Check(panel, default=False, label='Plot FT Windows', + action=self.onPlot) - psizer = wx.BoxSizer(wx.HORIZONTAL) - psizer.Add( wids['plot_paths'], 0, 2) - psizer.Add( wids['plot_ftwindows'], 0, 2) - psizer.Add(SimpleText(ppanel, ' Offset'), 0, 2) - psizer.Add( wids['plot_voffset'], 0, 2) - pack(ppanel, psizer) + wids['plot1_voff'] = FloatSpin(panel, value=0, digits=2, increment=0.25, + action=self.onPlot, size=(100, -1)) + wids['plot2_paths'] = Check(panel, default=False, label='Plot Each Path', + action=self.onPlot) + wids['plot2_ftwins'] = Check(panel, default=False, label='Plot FT Windows', + action=self.onPlot) + + wids['plot2_voff'] = FloatSpin(panel, value=0, digits=2, increment=0.25, + action=self.onPlot, size=(100, -1)) wids['plot_current'] = Button(panel,'Plot Current Model', action=self.onPlot, size=(175, -1)) @@ -1723,13 +1776,25 @@ def build(self): irow += 1 sizer.Add(wids['plot_current'], (irow, 0), (1, 1), LEFT) - sizer.Add(wids['plotone_op'], (irow, 1), (1, 1), LEFT) - sizer.Add(ppanel, 
(irow, 2), (1, 3), LEFT) + sizer.Add(wids['plot1_op'], (irow, 1), (1, 1), LEFT) + sizer.Add(SimpleText(panel, 'k-weight'), (irow, 2), (1, 1), LEFT) + sizer.Add(wids['plot_kw'], (irow, 3), (1, 1), LEFT) + irow += 1 + sizer.Add(wids['plot1_ftwins'], (irow, 0), (1, 1), LEFT) + sizer.Add(wids['plot1_paths'], (irow, 1), (1, 1), LEFT) + sizer.Add(SimpleText(panel, 'Vertical Offest:'), (irow, 2), (1, 1), LEFT) + sizer.Add(wids['plot1_voff'], (irow, 3), (1, 1), LEFT) + irow += 1 sizer.Add(SimpleText(panel, 'Add Second Plot:', style=LEFT), (irow, 0), (1, 1), LEFT) - sizer.Add(wids['plotalt_op'], (irow, 1), (1, 1), LEFT) + sizer.Add(wids['plot2_op'], (irow, 1), (1, 1), LEFT) sizer.Add(SimpleText(panel, 'Plot Window:', style=LEFT), (irow, 2), (1, 1), LEFT) - sizer.Add(wids['plot_win'], (irow, 3), (1, 1), LEFT) + sizer.Add(wids['plot2_win'], (irow, 3), (1, 1), LEFT) + irow += 1 + sizer.Add(wids['plot2_ftwins'], (irow, 0), (1, 1), LEFT) + sizer.Add(wids['plot2_paths'], (irow, 1), (1, 1), LEFT) + sizer.Add(SimpleText(panel, 'Vertical Offest:'), (irow, 2), (1, 1), LEFT) + sizer.Add(wids['plot2_voff'], (irow, 3), (1, 1), LEFT) irow += 1 sizer.Add(wids['show_pathpars'], (irow, 0), (1, 1), LEFT) @@ -1918,19 +1983,24 @@ def onRemoveFromHistory(self, event=None): self.show_results() def onPlot(self, event=None): - opts = {'build_fitmodel': False} - for key, meth in (('plot_ftwindows', 'IsChecked'), - ('plot_paths', 'IsChecked'), - ('plotone_op', 'GetStringSelection'), - ('plotalt_op', 'GetStringSelection'), - ('plot_win', 'GetStringSelection'), - ('plot_voffset', 'GetValue')): + for key, meth in (('plot1_ftwins', 'IsChecked'), + ('plot2_ftwins', 'IsChecked'), + ('plot1_paths', 'IsChecked'), + ('plot2_paths', 'IsChecked'), + ('plot1_op', 'GetStringSelection'), + ('plot2_op', 'GetStringSelection'), + ('plot1_voff', 'GetValue'), + ('plot2_voff', 'GetValue'), + ('plot_kw', 'GetStringSelection'), + ('plot2_win', 'GetStringSelection'), + ): opts[key] = getattr(self.wids[key], meth)() - 
opts['plotone_op'] = PlotOne_Choices[opts['plotone_op']] - opts['plotalt_op'] = PlotAlt_Choices[opts['plotalt_op']] - opts['plot_win'] = int(opts['plot_win']) + opts['plot1_op'] = Plot1_Choices[opts['plot1_op']] + opts['plot2_op'] = Plot2_Choices[opts['plot2_op']] + opts['plot2_win'] = int(opts['plot2_win']) + opts['plot_kw'] = int(opts['plot_kw']) result = self.get_fitresult() if result is None: @@ -1946,11 +2016,9 @@ def onPlot(self, event=None): result_name = f'{self.datagroup.groupname}.feffit_history[{self.nfit}]' opts['label'] = f'{result_name}.label' + opts['dataset_name'] = f'{result_name}.datasets[0]' opts['pargroup_name'] = f'{result_name}.paramgroup' - opts['paths_name'] = f'{result_name}.datasets[0].paths' - opts['pathsum_name'] = f'{result_name}.datasets[0].model' - opts['dataset_name'] = f'{result_name}.datasets[0].data' - opts['dgroup'] = dgroup + opts['title'] = f'{self.datagroup.filename}: {result.label}' for attr in ('kmin', 'kmax', 'dk', 'rmin', 'rmax', 'fitspace'): @@ -1958,7 +2026,6 @@ def onPlot(self, event=None): opts['fit_kwstring'] = "%s" % getattr(trans, 'kweight') opts['kwindow'] = getattr(trans, 'window') opts['topwin'] = self - self.feffit_panel.onPlot(**opts) diff --git a/larch/wxxas/prepeak_panel.py b/larch/wxxas/prepeak_panel.py index 7f2b608ec..eb3f239d7 100644 --- a/larch/wxxas/prepeak_panel.py +++ b/larch/wxxas/prepeak_panel.py @@ -37,7 +37,7 @@ DVSTYLE = dv.DV_SINGLE|dv.DV_VERT_RULES|dv.DV_ROW_LINES ModelChoices = {'other': ('', 'Constant', 'Linear', - 'Quadratic', 'Exponential', 'PowerLaw' + 'Quadratic', 'Exponential', 'PowerLaw', 'Linear Step', 'Arctan Step', 'ErrorFunction Step', 'Logistic Step', 'Rectangle'), 'peaks': ('', 'Gaussian', 'Lorentzian', @@ -732,7 +732,7 @@ def build_display(self): self.showresults_btn = Button(pan, 'Show Fit Results', action=self.onShowResults, size=(150, -1)) self.showresults_btn.Disable() - + self.fitbline_btn = Button(pan,'Fit Baseline', action=self.onFitBaseline, size=(150, -1)) diff --git 
a/larch/wxxas/xas_dialogs.py b/larch/wxxas/xas_dialogs.py index a94b2bdad..3b2d6739a 100644 --- a/larch/wxxas/xas_dialogs.py +++ b/larch/wxxas/xas_dialogs.py @@ -69,7 +69,8 @@ def add_floatspin(name, value, panel, with_pin=True, xasmain=None, relative_e0=relative_e0, callback=callback) fspin, pinb = FloatSpinWithPin(panel, value=value, - pin_action=pin_action, **kws) + pin_action=pin_action, + **kws) else: fspin = FloatSpin(panel, value=value, **kws) pinb = None @@ -744,7 +745,7 @@ def plot_results(self, event=None, keep_limits=True): ylabel=plotlabels.mu, **opts) xold, yold = self.dgroup.energy, self.dgroup.mu - ppanel.oplot(xold, yold, zorder=10, + ppanel.oplot(xold, yold, zorder=10, marker='o', markersize=4, linewidth=2.0, label='original', show_legend=True, **opts) if keep_limits: @@ -945,7 +946,7 @@ def plot_results(self, event=None, keep_limits=True): ylabel=plotlabels.mu, **opts) xold, yold = self.dgroup.energy, self.dgroup.mu - ppanel.oplot(xold, yold, zorder=10, + ppanel.oplot(xold, yold, zorder=10, marker='o', markersize=4, linewidth=2.0, label='original', show_legend=True, **opts) if keep_limits: @@ -1103,6 +1104,7 @@ def __init__(self, parent, controller, **kws): self.parent = parent self.controller = controller self.wids = {} + self.plot_markers = None self.dgroup = self.controller.get_group() groupnames = list(self.controller.file_groups.keys()) @@ -1137,10 +1139,6 @@ def __init__(self, parent, controller, **kws): undo = Button(panel, 'Undo remove', size=(125, -1), action=self.on_undo) - #wids['apply'] = Button(panel, 'Save / Overwrite', size=(150, -1), - # action=self.on_apply) - #SetTip(wids['apply'], '''Save deglitched, overwrite current arrays, -#clear undo history''') wids['save_as'] = Button(panel, 'Save As New Group: ', size=(150, -1), action=self.on_saveas) @@ -1151,15 +1149,20 @@ def __init__(self, parent, controller, **kws): self.history_message = SimpleText(panel, '') - opts = dict(size=(125, -1), digits=2, increment=0.1, action=None) + 
opts = dict(size=(125, -1), digits=2, increment=0.1) for wname in ('xlast', 'range1', 'range2'): - if wname == 'range2': lastx += 1 - pin_callback = partial(self.on_pinvalue, opt=wname) - fspin, pinbtn = add_floatspin(wname, lastx, panel, - with_pin=True, xasmain=self.parent, - callback=pin_callback, **opts) + if wname == 'range2': + lastx += 1 + pin_action = partial(self.parent.onSelPoint, opt=wname, + relative_e0=False, + callback=self.on_pinvalue) + + float_action=partial(self.on_floatvalue, opt=wname) + fspin, pinb = FloatSpinWithPin(panel, value=lastx, + pin_action=pin_action, + action=float_action) wids[wname] = fspin - wids[wname+'_pin'] = pinbtn + wids[wname+'_pin'] = pinb self.choice_range = Choice(panel, choices=('above', 'below', 'between'), size=(90, -1), action=self.on_rangechoice) @@ -1221,6 +1224,7 @@ def reset_data_history(self): plottype = DEGLITCH_PLOTS[plotstr] self.data = self.get_xydata(datatype=plottype) self.xmasks = [np.ones(len(self.data[0]), dtype=bool)] + self.plot_markers = None def get_xydata(self, datatype='mu'): if hasattr(self.dgroup, 'energy'): @@ -1262,6 +1266,12 @@ def on_plotchoice(self, event=None): def on_pinvalue(self, opt='__', xsel=None, **kws): if xsel is not None and opt in self.wids: self.wids[opt].SetValue(xsel) + self.plot_markers = opt + self.plot_results() + + def on_floatvalue(self, val=None, opt='_', **kws): + self.plot_markers = opt + self.plot_results() def on_remove(self, event=None, opt=None): xwork, ywork = self.data @@ -1378,6 +1388,26 @@ def ek_formatter(x, pos): if plottype in ('chie', 'chiew'): ppanel.axes.xaxis.set_major_formatter(FuncFormatter(ek_formatter)) + if self.plot_markers is not None: + rchoice = self.choice_range.GetStringSelection().lower() + xwork, ywork = self.data + opts = dict(marker='o', markersize=6, zorder=2, label='_nolegend_', + markerfacecolor='#66000022', markeredgecolor='#440000') + if self.plot_markers == 'xlast': + bad = index_nearest(xwork, self.wids['xlast'].GetValue()) + 
ppanel.axes.plot([xwork[bad]], [ywork[bad]], **opts) + else: + bad = index_nearest(xwork, self.wids['range1'].GetValue()) + if rchoice == 'above': + ppanel.axes.plot([xwork[bad:]], [ywork[bad:]], **opts) + elif rchoice == 'below': + ppanel.axes.plot([xwork[:bad+1]], [ywork[:bad+1]], **opts) + elif rchoice == 'between': + bad2 = index_nearest(xwork, self.wids['range2'].GetValue()) + ppanel.axes.plot([xwork[bad:bad2+1]], + [ywork[bad:bad2+1]], **opts) + + ppanel.canvas.draw() self.history_message.SetLabel('%i items in history' % (len(self.xmasks)-1)) diff --git a/larch/wxxas/xasgui.py b/larch/wxxas/xasgui.py index c7da36410..ad599d524 100644 --- a/larch/wxxas/xasgui.py +++ b/larch/wxxas/xasgui.py @@ -356,6 +356,8 @@ def Btn(msg, x, act): select_action=self.ShowFile, remove_action=self.RemoveFile) set_color(self.controller.filelist, 'list_fg', bg='list_bg') + # self.controller.filelist.check_event = self.filelist_check_event + self.controller.filelist.Bind(wx.EVT_CHECKLISTBOX, self.filelist_check_event) tsizer = wx.BoxSizer(wx.HORIZONTAL) tsizer.Add(sel_all, 1, LEFT|wx.GROW, 1) @@ -428,6 +430,15 @@ def RemoveFile(self, fname=None, **kws): group = self.controller.file_groups.pop(s) self.controller.sync_xasgroups() + def filelist_check_event(self, evt=None): + """MN 2024-Feb this is included to better 'swallow' the checked event, + so that it does in fact run ShowFile(). 
+ This could be removed eventually, as wxutils will also no longer run + filelist.SetSelection()""" + index = evt.GetSelection() + label = evt.GetString() + pass + def ShowFile(self, evt=None, groupname=None, process=True, filename=None, plot=True, **kws): if filename is None and evt is not None: @@ -446,7 +457,6 @@ def ShowFile(self, evt=None, groupname=None, process=True, if (getattr(dgroup, 'datatype', 'raw').startswith('xa') and not (hasattr(dgroup, 'norm') and hasattr(dgroup, 'e0'))): self.process_normalization(dgroup, force=True, use_form=False) - if filename is None: filename = dgroup.filename self.current_filename = filename diff --git a/larch/wxxas/xasnorm_panel.py b/larch/wxxas/xasnorm_panel.py index 6c000cb79..7d18da72d 100644 --- a/larch/wxxas/xasnorm_panel.py +++ b/larch/wxxas/xasnorm_panel.py @@ -8,7 +8,6 @@ import numpy as np from functools import partial - from xraydb import guess_edge, atomic_number from larch.utils import gformat @@ -62,6 +61,19 @@ FSIZE = 120 FSIZEBIG = 175 +def get_auto_nnorm(config): + "automatically set nnorm from range" + norm1 = config['norm1'] + norm2 = config['norm2'] + nrange = abs(norm2 - norm1) + nnorm = 2 + if nrange < 350: + nnorm = 1 + if nrange < 50: + nnorm = 0 + return nnorm + + class XASNormPanel(TaskPanel): """XAS normalization Panel""" def __init__(self, parent, controller=None, **kws): @@ -151,6 +163,18 @@ def build_display(self): sx.Add(self.wids['auto_step'], 0, LEFT, 4) pack(step_panel, sx) + # nnorm row + nnorm_panel = wx.Panel(panel) + self.wids['nnorm'] = Choice(nnorm_panel, choices=list(NNORM_CHOICES.keys()), + size=(150, -1), action=self.onNNormChoice, + default=2) + self.wids['auto_nnorm'] = Check(nnorm_panel, default=True, label='auto?', + action=self.onAuto_NNORM) + + sx = wx.BoxSizer(wx.HORIZONTAL) + sx.Add(self.wids['nnorm'], 0, LEFT, 4) + sx.Add(self.wids['auto_nnorm'], 0, LEFT, 4) + pack(nnorm_panel, sx) self.wids['energy_ref'] = Choice(panel, choices=['None'], action=self.onEnergyRef, size=(300,
-1)) @@ -159,9 +183,6 @@ def build_display(self): size=(100, -1), action=self.onNormMethod, default=0) - self.wids['nnorm'] = Choice(panel, choices=list(NNORM_CHOICES.keys()), - size=(150, -1), action=self.onNormMethod, - default=0) opts = {'size': (FSIZE, -1), 'digits': 2, 'increment': 5.0, 'action': self.onSet_Ranges, 'min_val':-99000, 'max_val':99000} @@ -212,7 +233,7 @@ def build_display(self): action=self.onFreezeGroup) use_auto = Button(panel, 'Use Default Settings', size=(200, -1), - action=self.onAutoNorm) + action=self.onResetNorm) copy_auto = Button(panel, 'Copy', size=(60, -1), action=self.onCopyAuto) @@ -278,7 +299,7 @@ def CopyBtn(name): add_text('Norm Energy range: ') panel.Add(nor_panel, dcol=2) panel.Add(SimpleText(panel, 'Polynomial Type:'), newrow=True) - panel.Add(self.wids['nnorm'], dcol=3) + panel.Add(nnorm_panel, dcol=3) panel.Add(HLine(panel, size=(HLINEWID, 3)), dcol=4, newrow=True) panel.Add(self.wids['is_frozen'], newrow=True) @@ -300,22 +321,16 @@ def get_config(self, dgroup=None): return self.get_defaultconfig() self.read_form() + defconf = self.get_defaultconfig() conf = getattr(dgroup.config, self.configname, defconf) + for k, v in defconf.items(): if k not in conf: conf[k] = v if conf.get('edge_step', None) is None: conf['edge_step'] = getattr(dgroup, 'edge_step', 1) - # update config from callargs - last call arguments - callargs = getattr(dgroup, 'callargs', None) - if callargs is not None: - pre_callargs = getattr(callargs, 'pre_edge', None) - if pre_callargs is not None: - for attr in ('e0', 'norm', 'nvict', 'pre1', 'pre2', 'norm1', 'norm2'): - update_confval(conf, pre_callargs, attr) - atsym = '?' 
if hasattr(dgroup, 'element'): elem = getattr(dgroup, 'element', '?') @@ -337,7 +352,6 @@ def get_config(self, dgroup=None): conf['atsym'] = atsym if atsym == '?': conf['atsym'] = getattr(dgroup, 'atsym', atsym) - conf['edge'] = getattr(dgroup,'edge', conf['edge']) xeref = getattr(dgroup, 'energy_ref', '') @@ -361,6 +375,7 @@ def get_config(self, dgroup=None): conf['atsym'] = atsym conf['edge'] = edge + if hasattr(dgroup, 'mback_params'): conf['atsym'] = getattr(dgroup.mback_params, 'atsym', conf['atsym']) conf['edge'] = getattr(dgroup.mback_params, 'edge', conf['edge']) @@ -409,7 +424,9 @@ def fill_form(self, dgroup): self.wids['nvict'].SetStringSelection("%d" % opts['nvict']) self.wids['show_e0'].SetValue(opts['show_e0']) self.wids['auto_e0'].SetValue(opts['auto_e0']) + self.wids['auto_nnorm'].SetValue(opts.get('auto_nnorm', 0)) self.wids['auto_step'].SetValue(opts['auto_step']) + self.wids['edge'].SetStringSelection(opts['edge'].title()) self.wids['atsym'].SetStringSelection(opts['atsym'].title()) self.wids['norm_method'].SetStringSelection(opts['norm_method'].lower()) @@ -439,17 +456,19 @@ def fill_form(self, dgroup): wx.CallAfter(self.unset_skip_process) def set_nnorm_widget(self, nnorm=None): - nnorm_str = 'auto' - if nnorm is not None: - try: - nnorm = int(nnorm) - except ValueError: - nnorm = None + if nnorm in (None, 'auto'): + nnorm = nnorm_default = get_auto_nnorm(self.get_config()) + + try: + nnorm = int(nnorm) + except ValueError: + nnorm = nnorm_default - for k, v in NNORM_CHOICES.items(): - if v == nnorm: - nnorm_str = k + for k, v in NNORM_CHOICES.items(): + if v == nnorm: + nnorm_str = k self.wids['nnorm'].SetStringSelection(nnorm_str) + self.wids['auto_nnorm'].SetValue(0) def unset_skip_process(self): self.skip_process = False @@ -471,7 +490,8 @@ def read_form(self): form_opts['plotone_op'] = self.plotone_op.GetStringSelection() form_opts['plotsel_op'] = self.plotsel_op.GetStringSelection() form_opts['plot_voff'] = 
self.wids['plot_voff'].GetValue() - for ch in ('show_e0', 'show_pre', 'show_norm', 'auto_e0', 'auto_step'): + for ch in ('show_e0', 'show_pre', 'show_norm', 'auto_e0', + 'auto_step', 'auto_nnorm'): form_opts[ch] = self.wids[ch].IsChecked() form_opts['norm_method'] = self.wids['norm_method'].GetStringSelection().lower() @@ -481,9 +501,18 @@ def read_form(self): form_opts['energy_ref'] = self.wids['energy_ref'].GetStringSelection() return form_opts + def onNNormChoice(self, evt=None): + auto_nnorm = self.wids['auto_nnorm'].SetValue(0) + self.onNormMethod() + def onNormMethod(self, evt=None): method = self.wids['norm_method'].GetStringSelection().lower() + auto_nnorm = self.wids['auto_nnorm'].GetValue() + nnorm = NNORM_CHOICES.get(self.wids['nnorm'].GetStringSelection(), None) + if nnorm is None: + nnorm = get_auto_nnorm(self.get_config()) + nvict = int(self.wids['nvict'].GetStringSelection()) self.update_config({'norm_method': method, 'nnorm': nnorm, 'nvict': nvict}) if method.startswith('mback'): @@ -503,8 +532,9 @@ def _set_frozen(self, frozen): dgroup.is_frozen = frozen except: pass - for wattr in ('e0', 'step', 'pre1', 'pre2', 'norm1', 'norm2', - 'nvict', 'nnorm', 'show_e0', 'auto_e0', 'auto_step', + + for wattr in ('e0', 'step', 'pre1', 'pre2', 'norm1', 'norm2', 'nvict', + 'nnorm', 'show_e0', 'auto_e0', 'auto_step', 'auto_nnorm', 'norm_method', 'edge', 'atsym', 'show_pre', 'show_norm'): self.wids[wattr].Enable(not frozen) @@ -613,18 +643,25 @@ def onPlotSel(self, evt=None): wx.CallAfter(self.controller.set_focus) - def onAutoNorm(self, evt=None): - defaults = self.get_defaultconfig() - norm1 = defaults['norm1'] - norm2 = defaults['norm2'] - nnorm = 2 - if (norm2-norm1 < 350): nnorm = 1 - if (norm2-norm1 < 50): nnorm = 0 + def onAuto_NNORM(self, evt=None): + if evt.IsChecked(): + nnorm = get_auto_nnorm(self.get_config()) + self.set_nnorm_widget(nnorm) + self.wids['auto_nnorm'].SetValue(0) + time.sleep(0.001) + wx.CallAfter(self.onReprocess) + def 
onResetNorm(self, evt=None): + auto_nnorm = self.wids['auto_nnorm'].GetValue() + if auto_nnorm: + nnorm = get_auto_nnorm(self.get_config()) self.set_nnorm_widget(nnorm) - self.wids['norm_method'].SetSelection(0) + + defaults = self.get_defaultconfig() + self.wids['auto_step'].SetValue(1) self.wids['auto_e0'].SetValue(1) + self.wids['auto_e0'].SetValue(1) self.wids['nvict'].SetSelection(0) for attr in ('pre1', 'pre2', 'norm1', 'norm2'): self.wids[attr].SetValue(defaults[attr]) @@ -800,7 +837,6 @@ def onReprocess(self, evt=None, value=None, **kws): def process(self, dgroup=None, force_mback=False, force=False, use_form=True, **kws): """ handle process (pre-edge/normalize) of XAS data from XAS form """ - if self.skip_process and not force: return if dgroup is None: @@ -809,7 +845,6 @@ def process(self, dgroup=None, force_mback=False, force=False, use_form=True, ** return self.skip_process = True - conf = self.get_config(dgroup) form = self.read_form() if not use_form: @@ -848,12 +883,18 @@ def process(self, dgroup=None, force_mback=False, force=False, use_form=True, ** dgroup.xdat = dgroup.energy = res.energy dgroup.energy_units = en_units - if hasattr(dgroup, 'e0') and form['atsym'] == '?': - form['atsym'], form['edge'] = guess_edge(dgroup.e0) + if not hasattr(dgroup, 'e0'): + e0 = find_e0(dgroup) + if form['atsym'] == '?' 
and conf.get('atsym', '?') != '?': + form['atsym'] = conf['atsym'] + form['edge'] = conf.get('edge', 'K') + if form['atsym'] == '?': + form['atsym'], form['edge'] = guess_edge(dgroup.e0) dgroup.atsym = form['atsym'] dgroup.edge = form['edge'] + cmds = [] # test whether the energy shift is 0 or is different from the current energy shift: ediff = 8.42e14 # just a huge energy step/shift @@ -946,7 +987,6 @@ def process(self, dgroup=None, force_mback=False, force=False, use_form=True, ** atsym, edge = guess_edge(dgroup.e0) conf['atsym'] = dgroup.atsym = atsym conf['edge'] = dgroup.edge = edge - self.wids['atsym'].SetStringSelection(dgroup.atsym) self.wids['edge'].SetStringSelection(dgroup.edge) @@ -964,6 +1004,7 @@ def process(self, dgroup=None, force_mback=False, force=False, use_form=True, ** self.update_config(conf, dgroup=dgroup) wx.CallAfter(self.unset_skip_process) + def get_plot_arrays(self, dgroup): lab = plotlabels.norm if dgroup is None: @@ -983,7 +1024,7 @@ def get_plot_arrays(self, dgroup): if not hasattr(dgroup, 'scale'): dgroup.scale = 1.0 - dgroup.norm = dgroup.ydat*dgroup.scale + dgroup.norm = dgroup.norm*dgroup.scale if pchoice == 'dmude': dgroup.plot_ylabel = 'dy/dx' dgroup.plot_yarrays = [('dmude', PLOTOPTS_1, 'dy/dx')] @@ -996,12 +1037,12 @@ def get_plot_arrays(self, dgroup): elif pchoice == 'norm+dmude': lab = plotlabels.norm dgroup.plot_y2label = 'dy/dx' - dgroup.plot_yarrays = [('ydat', PLOTOPTS_1, 'y'), + dgroup.plot_yarrays = [('norm', PLOTOPTS_1, 'y'), ('dmude', PLOTOPTS_D, 'dy/dx')] elif pchoice == 'norm+d2mude': lab = plotlabels.norm dgroup.plot_y2label = 'd2y/dx2' - dgroup.plot_yarrays = [('ydat', PLOTOPTS_1, 'y'), + dgroup.plot_yarrays = [('norm', PLOTOPTS_1, 'y'), ('d2normde', PLOTOPTS_D, 'd2y/dx2')] return @@ -1033,12 +1074,12 @@ def get_plot_arrays(self, dgroup): elif pchoice == 'norm+i0': lab = plotlabels.norm dgroup.plot_y2label = lab2 = plotlabels.i0 - dgroup.plot_yarrays = [('ydat', PLOTOPTS_1, lab), + dgroup.plot_yarrays = [('norm', 
PLOTOPTS_1, lab), ('i0', PLOTOPTS_D, lab2)] elif pchoice == 'norm+flat': lab = plotlabels.norm dgroup.plot_y2label = lab2 = plotlabels.flat - dgroup.plot_yarrays = [('ydat', PLOTOPTS_1, lab), + dgroup.plot_yarrays = [('norm', PLOTOPTS_1, lab), ('flat', PLOTOPTS_D, lab2)] elif pchoice == 'mback_norm': req_attrs.append('mback_norm') @@ -1090,7 +1131,6 @@ def get_plot_arrays(self, dgroup): ival = min(len(y4e0)-1, index_of(dgroup.energy, dgroup.e0 + val)) dgroup.plot_extras.append(('marker', dgroup.e0+val, y4e0[ival], popts)) - def plot(self, dgroup, title=None, plot_yarrays=None, yoff=0, delay_draw=True, multi=False, new=True, with_extras=True, **kws): @@ -1168,7 +1208,6 @@ def plot(self, dgroup, title=None, plot_yarrays=None, yoff=0, narr = len(plot_yarrays) - 1 _linewidth = popts['linewidth'] - for i, pydat in enumerate(plot_yarrays): yaname, yopts, yalabel = pydat popts.update(yopts) diff --git a/larch/wxxrd/XRD2Dviewer.py b/larch/wxxrd/XRD2Dviewer.py index acc8bff82..40b244a85 100644 --- a/larch/wxxrd/XRD2Dviewer.py +++ b/larch/wxxrd/XRD2Dviewer.py @@ -36,7 +36,7 @@ return_ai,twth_from_xy,q_from_xy,eta_from_xy) from .XRDCalibrationFrame import CalibrationPopup from .XRDMaskFrame import MaskToolsPopup -from .XRD1Dviewer import Calc1DPopup +# from .XRD1Dviewer import Calc1DPopup ################################### @@ -655,8 +655,8 @@ def saveIMAGE(self,event=None,raw=False): tifffile.imsave(path,self.plt_img) def on1DXRD(self,event=None): - - read, save, plot = False, False, False + pass + """ read, save, plot = False, False, False if self.calfile is not None and self.plt_img is not None: myDlg = Calc1DPopup(self,self.plt_img) if myDlg.ShowModal() == wx.ID_OK: @@ -735,7 +735,7 @@ def on1DXRD(self,event=None): data1dxrd.xrd_from_2d([q,cnts],'q') self.xrddisplay1D.xrd1Dviewer.add1Ddata(data1dxrd) self.xrddisplay1D.Show() - + """ @@ -931,7 +931,7 @@ def onClose(self, event=None): except: pass - + def onExit(self, event=None): dlg = wx.MessageDialog(None, 'Really 
Quit?', 'Question', wx.YES_NO | wx.NO_DEFAULT | wx.ICON_QUESTION) diff --git a/larch/wxxrd/xrd1d_display.py b/larch/wxxrd/xrd1d_display.py index 6e681eb54..6a8ef7acd 100644 --- a/larch/wxxrd/xrd1d_display.py +++ b/larch/wxxrd/xrd1d_display.py @@ -41,7 +41,7 @@ Button, HLine, Choice, Check, MenuItem, COLORS, set_color, CEN, RIGHT, LEFT, FRAMESTYLE, Font, FONTSIZE, FONTSIZE_FW, FileSave, FileOpen, - flatnotebook, Popup, FileCheckList, + flatnotebook, Popup, FileCheckList, OkCancel, EditableListBox, ExceptionPopup, CIFFrame, LarchFrame, LarchWxApp) @@ -235,6 +235,35 @@ def onDone(self, event=None): self.callback(self.wids['newname'].GetValue()) self.Destroy() +class ResetMaskDialog(wx.Dialog): + """confirmation dialog for removing the currently loaded mask""" + def __init__(self, parent): + wx.Dialog.__init__(self, parent, wx.ID_ANY, size=(350, 300), + title="Unset Mask?") + self.SetFont(Font(FONTSIZE)) + panel = GridPanel(self, ncols=3, nrows=4, pad=4, itemstyle=LEFT) + + self.wids = wids = {} + + warn_msg = 'This will remove the current mask!'
+ + panel.Add(SimpleText(panel, warn_msg), dcol=2) + + panel.Add(OkCancel(panel), dcol=2, newrow=True) + panel.pack() + + sizer = wx.BoxSizer(wx.VERTICAL) + sizer.Add(panel, 1, LEFT, 5) + pack(self, sizer) + self.Fit() + w0, h0 = self.GetSize() + w1, h1 = self.GetBestSize() + self.SetSize((max(w0, w1)+25, max(h0, h1)+25)) + + def GetResponse(self): + self.Raise() + return (self.ShowModal() == wx.ID_OK) + class XRD1DFrame(wx.Frame): """browse 1D XRD patterns""" @@ -261,6 +290,7 @@ def __init__(self, parent=None, wavelength=1.0, ponifile=None, self.cif_browser = None self.img_display = None self.plot_display = None + self.mask = None self.datasets = {} self.form = {} self.createMenus() @@ -297,20 +327,34 @@ def createMenus(self): MenuItem(self, fmenu, "&Quit\tCtrl+Q", "Quit program", self.onClose) MenuItem(self, smenu, "Browse AmMin Crystal Structures", - "Browse Structures from Am Min Database", + "Browse Structures from American Mineralogical Database", self.onCIFBrowse) MenuItem(self, cmenu, "Read PONI Calibration File", - "Read PONI Calibration (pyFAI) FIle", + "Read PONI Calibration (pyFAI) File", self.onReadPONI) - MenuItem(self, cmenu, "Set Energy / Wavelength", + MenuItem(self, cmenu, "Set Energy/Wavelength", "Set Energy and Wavelength", self.onSetWavelength) + MenuItem(self, cmenu, "Set Mask for imported Images", + "Read Mask for Imported TIFF XRD Images", self.onReadMask) + + m = MenuItem(self, cmenu, "Unset Mask", + "Reset to use no mask for Imported TIFF XRD Images", + self.onUnsetMask) + self.unset_mask_menu = m + m.Enable(False) + + m = MenuItem(self, cmenu, "Show Mask Image", + "Show image of mask", self.onShowMask) + self.show_mask_menu = m + m.Enable(False) + menubar = wx.MenuBar() menubar.Append(fmenu, "&File") - menubar.Append(cmenu, "&Calibration") + menubar.Append(cmenu, "&Calibration and Mask") menubar.Append(smenu, "&Search CIF Structures") self.SetMenuBar(menubar) @@ -377,6 +421,41 @@ def onReadXY(self, event=None): dxrd = xrd1d(file=sfile, 
wavelength=self.wavelength) self.add_data(dxrd, label=xfile) + def onUnsetMask(self, event=None): + if self.mask is not None: + dlg = ResetMaskDialog(self) + if dlg.GetResponse(): + self.mask = None + self.unset_mask_menu.Enable(False) + self.show_mask_menu.Enable(False) + + def onReadMask(self, event=None): + sfile = FileOpen(self, 'Read Mask Image File', + default_file='XRD.mask', + default_dir=get_cwd(), + wildcard="Mask Files(*.mask)|*.mask|All files (*.*)|*.*") + + if sfile is not None: + valid_mask = False + try: + img = tifffile.imread(sfile) + valid_mask = len(img.shape)==2 and img.max() == 1 and img.min() == 0 + except: + valid_mask = False + if valid_mask: + self.mask = (1 - img[::-1, :]).astype(img.dtype) + self.unset_mask_menu.Enable(True) + self.show_mask_menu.Enable(True) + else: + title = "Could not use mask file" + message = [f"Could not use {sfile:s} as a mask file"] + o = ExceptionPopup(self, title, message) + + def onShowMask(self, event=None): + if self.mask is not None: + imd = self.get_imdisplay() + imd.display(self.mask, colomap='gray', auto_contrast=True) + def onReadTIFF(self, event=None): sfile = FileOpen(self, 'Read TIFF XRD Image', default_file='XRD.tiff', @@ -395,6 +474,15 @@ def onReadTIFF(self, event=None): img = tifffile.imread(sfile) img = img[::-1, :] + if self.mask is not None: + if (self.mask.shape == img.shape): + img = img*self.mask + else: + title = "Could not apply current mask" + message = [f"Could not apply current mask [shape={self.mask.shape}]", + f"to this XRD image [shape={img.shape}]"] + o = ExceptionPopup(self, title, message) + if (img.max() > MAXVAL_INT16) and (img.max() < MAXVAL_INT16 + 64): #probably really 16bit data img[np.where(img>MAXVAL_INT16)] = 0 diff --git a/larch/xafs/feffdat.py b/larch/xafs/feffdat.py index bc0a34299..46124532a 100644 --- a/larch/xafs/feffdat.py +++ b/larch/xafs/feffdat.py @@ -230,8 +230,8 @@ def __init__(self, filename=None, label='', feffrun='', s02=None, degen=None, self.shell = 'K' 
self.absorber = None self._feffdat = _feffdat - - self.hashkey = 'p000' + self.dataset = 'd001' + self.hashkey = 'p001' self.k = None self.chi = None @@ -322,13 +322,13 @@ def __geom2label(self): rep.extend(atom) rep.append("%7.4f" % self._feffdat.reff) s = "|".join([str(i) for i in rep]) - return "p%s" % (b32hash(s)[:9].lower()) + return "p%s" % (b32hash(s)[:8].lower()) def pathpar_name(self, parname): """ get internal name of lmfit Parameter for a path paramter, using Path's hashkey """ - return f'{parname}_{self.hashkey}' + return f'{parname}_{self.dataset}_{self.hashkey}' def __copy__(self): newpath = FeffPathGroup() @@ -362,7 +362,7 @@ def rmass(self, val): pass def __repr__(self): return f'' - def create_path_params(self, params=None): + def create_path_params(self, params=None, dataset=None): """ create Path Parameters within the current lmfit.Parameters namespace """ @@ -372,7 +372,8 @@ def create_path_params(self, params=None): self.params = params if self.params is None: self.params = Parameters() - + if dataset is not None: + self.dataset = dataset if (not isinstance(self.params, Parameters) and isinstance(self.params, dict)): self.params = dict2params(self.params) diff --git a/larch/xafs/feffit.py b/larch/xafs/feffit.py index ba12293d5..dccdaf357 100644 --- a/larch/xafs/feffit.py +++ b/larch/xafs/feffit.py @@ -361,10 +361,10 @@ def prepare_fit(self, params, other_hashkeys=None): else: self.estimate_noise(chi=self._chi, rmin=15.0, rmax=30.0) - # if not refining the background, and if delta_chi (uncertainty in - # chi(k) from autobk or other source) exists, add it in quadrature - # to high-k noise estimate, and update epsilon_k to be this value - if not self.refine_bkg and hasattr(self.data, 'delta_chi'): + # if delta_chi (uncertainty in chi(k) from autobk or other source) + # exists, add it in quadrature to high-k noise estimate, and + # update epsilon_k to be this value + if hasattr(self.data, 'delta_chi'): cur_eps_k = getattr(self, 'epsilon_k', 0.0) if 
isinstance(cur_eps_k, (list, tuple)): eps_ave = 0. @@ -378,20 +378,20 @@ def prepare_fit(self, params, other_hashkeys=None): _dchi = interp(self.model.k, self.data.k, _dchi) self.set_epsilon_k(np.sqrt(_dchi**2 + cur_eps_k**2)) + self.__generate_hashkey(other_hashkeys=other_hashkeys) # for each path in the list of paths, setup the Path Parameters # to use the current Parameters namespace if isinstance(params, Group): params = group2params(params) for label, path in self.paths.items(): - path.create_path_params(params=params) + path.create_path_params(params=params, dataset=self.hashkey) if path.spline_coefs is None: path.create_spline_coefs() - self.__generate_hashkey(other_hashkeys=other_hashkeys) self.bkg_spline = {} if self.refine_bkg: trans.rbkg = max(trans.rbkg, trans.rmin) - trans.rmin = 0. + trans.rmin = trans.rstep self.n_idp = 1 + 2*(trans.rmax)*(trans.kmax-trans.kmin)/pi nspline = 1 + round(2*(trans.rbkg)*(trans.kmax-trans.kmin)/pi) knots_k = np.linspace(trans.kmin, trans.kmax, nspline) @@ -715,8 +715,8 @@ def feffit(paramgroup, datasets, rmax_out=10, path_outputs=True, scale_covar=False, **fit_kws) result = fit.leastsq() - # params2group(result.params, work_paramgroup) - dat = concatenate([d._residual(result.params, data_only=True) for d in datasets]) + dat = concatenate([d._residual(result.params, data_only=True) + for d in datasets]) n_idp = 0 for ds in datasets: @@ -855,94 +855,47 @@ def feffit_report(result, min_correl=0.1, with_paths=True, _larch=None): def getval(attr): return getfloat_attr(result, attr) - out.append(f" n_function_calls = {getval('nfev')}") - out.append(f" n_variables = {getval('nvarys')}") - out.append(f" n_data_points = {getval('ndata')}") - out.append(f" n_independent = {getval('n_independent')}") - out.append(f" chi_square = {getval('chi_square')}") - out.append(f" reduced chi_square = {getval('chi2_reduced')}") - out.append(f" r-factor = {getval('rfactor')}") - out.append(f" Akaike info crit = {getval('aic')}") - out.append(f" 
Bayesian info crit = {getval('bic')}") - out.append(' ') - if len(datasets) == 1: - out.append(header % 'Dataset') - else: - out.append(header % 'Datasets (%i)' % len(datasets)) - for i, ds in enumerate(datasets): - if not hasattr(ds, 'epsilon_k'): - ds.prepare_fit(params) - tr = ds.transform - if isinstance(tr.kweight, Iterable): - if isinstance(ds.epsilon_k[0], np.ndarray): - msg = [] - for eps in ds.epsilon_k: - msg.append('Array(mean=%s, std=%s)' % (gfmt(eps.mean()).strip(), - gfmt(eps.std()).strip())) - eps_k = ', '.join(msg) - else: - eps_k = ', '.join([gfmt(eps).strip() for eps in ds.epsilon_k]) - eps_r = ', '.join([gfmt(eps).strip() for eps in ds.epsilon_r]) - kweigh = ', '.join(['%i' % kwe for kwe in tr.kweight]) - eps_k = eps_k.strip() - eps_r = eps_r.strip() - kweigh = kweigh.strip() - else: - if isinstance(ds.epsilon_k, np.ndarray): - eps_k = 'Array(mean=%s, std=%s)' % (gfmt(ds.epsilon_k.mean()).strip(), - gfmt(ds.epsilon_k.std()).strip()) - else: - eps_k = gfmt(ds.epsilon_k).strip() - eps_r = gfmt(ds.epsilon_r).strip() - kweigh = '%i' % tr.kweight - out.append(f" unique_id = '{ds.hashkey}'") - out.append(f" fit space = '{tr.fitspace}'") - if ds.refine_bkg: - out.append(f" r_bkg (refine bkg) = {tr.rbkg:.3f}") - out.append(f" r-range = {tr.rmin:.3f}, {tr.rmax:.3f}") - out.append(f" k-range = {tr.kmin:.3f}, {tr.kmax:.3f}") - kwin = f" k window, dk = '{tr.window}', {tr.dk:.3f}" - if tr.dk2 is not None: - kwin += f", {tr.dk2:.3f}" - out.append(kwin) - pathfiles = [p.filename for p in ds.paths.values()] - out.append(f" paths used in fit = {repr(pathfiles)}") - out.append(f" k-weight = {kweigh}") - out.append(f" epsilon_k = {eps_k}") - out.append(f" epsilon_r = {eps_r}") - out.append(f" n_independent = {ds.n_idp:.3f}") - # + def add_string(label, value, llen=20): + if len(label) < llen: + label = (label + ' '*llen)[:llen] + out.append(f" {label} = {value}") + + add_string('n_function_calls', getval('nfev')) + add_string('n_variables', getval('nvarys')) + 
add_string('n_data_points', getval('ndata')) + add_string('n_independent', getval('n_independent')) + add_string('chi_square', getval('chi_square')) + add_string('reduced chi_square', getval('chi2_reduced')) + add_string('r-factor', getval('rfactor')) + add_string('Akaike info crit', getval('aic')) + add_string('Bayesian info crit', getval('bic')) + out.append(' ') out.append(header % 'Variables') for name, par in params.items(): if any([name.endswith('_%s' % phash) for phash in path_hashkeys]): continue - if len(name) < 14: - name = (name + ' '*14)[:14] - if isParameter(par): if par.vary: stderr = 'unknown' if par.stderr is not None: stderr = gfmt(par.stderr) - out.append(varformat % (name, gfmt(par.value), - stderr, gfmt(par.init_value))) + add_string(name, f"{gfmt(par.value)} +/-{stderr} (init={gfmt(par.init_value)})") elif par.expr is not None: stderr = 'unknown' if par.stderr is not None: stderr = gfmt(par.stderr) - out.append(exprformat % (name, gfmt(par.value), - stderr, par.expr)) + add_string(name, f"{gfmt(par.value)} +/-{stderr} = '{par.expr}'") else: - out.append(fixformat % (name, gfmt(par.value))) + add_string(name, f"{gfmt(par.value)} (fixed)") covar_vars = result.var_names if len(covar_vars) > 0: out.append(' ') out.append(header % 'Correlations' + - ' (unreported correlations are < % .3f)' % min_correl) + ' (unreported correlations are < % .3f)' % min_correl) correls = {} for i, name in enumerate(covar_vars): par = params[name] @@ -956,18 +909,61 @@ def getval(attr): sort_correl = sorted(correls.items(), key=lambda it: abs(it[1])) sort_correl.reverse() for name, val in sort_correl: - if abs(val) < min_correl: - break - if len(name) < 20: - name = (name + ' '*20)[:20] - out.append(' %s = % .3f' % (name, val)) + if abs(val) > min_correl: + vv = f"{val:+.3f}".replace('+', ' ') + add_string(name, vv) - if with_paths: - out.append(' ') - out.append(header % 'Paths') - for ids, ds in enumerate(datasets): - if len(datasets) > 1: - out.append(' dataset %i:' 
% (ids+1)) + out.append(' ') + for i, ds in enumerate(datasets): + if not hasattr(ds, 'epsilon_k'): + ds.prepare_fit(params) + tr = ds.transform + if isinstance(tr.kweight, Iterable): + if isinstance(ds.epsilon_k[0], np.ndarray): + msg = [] + for eps in ds.epsilon_k: + msg.append('Array(mean=%s, std=%s)' % (gfmt(eps.mean()).strip(), + gfmt(eps.std()).strip())) + eps_k = ', '.join(msg) + else: + eps_k = ', '.join([gfmt(eps).strip() for eps in ds.epsilon_k]) + eps_r = ', '.join([gfmt(eps).strip() for eps in ds.epsilon_r]) + kweigh = ', '.join(['%i' % kwe for kwe in tr.kweight]) + eps_k = eps_k.strip() + eps_r = eps_r.strip() + kweigh = kweigh.strip() + else: + if isinstance(ds.epsilon_k, np.ndarray): + eps_k = 'Array(mean=%s, std=%s)' % (gfmt(ds.epsilon_k.mean()).strip(), + gfmt(ds.epsilon_k.std()).strip()) + else: + eps_k = gfmt(ds.epsilon_k).strip() + eps_r = gfmt(ds.epsilon_r).strip() + kweigh = '%i' % tr.kweight + extra = f" {i+1} of {len(datasets)}" if len(datasets) > 1 else "" + + out.append(f"[[Dataset{extra}]]") + add_string('unique_id', f"'{ds.hashkey}'") + add_string('fit space', f"'{tr.fitspace}'") + if ds.refine_bkg: + add_string('r_bkg (refine bkg)', f"{tr.rbkg:.3f}") + add_string('r-range', f"{tr.rmin:.3f}, {tr.rmax:.3f}") + add_string('k-range', f"{tr.kmin:.3f}, {tr.kmax:.3f}") + kwin = f"'{tr.window}', {tr.dk:.3f}" + if tr.dk2 is not None: + kwin += f", {tr.dk2:.3f}" + add_string('k window, dk', kwin) + pathfiles = repr([p.filename for p in ds.paths.values()]) + add_string('paths used in fit', pathfiles) + add_string('k-weight', kweigh) + add_string('epsilon_k', eps_k) + add_string('epsilon_r', eps_r) + add_string('n_independent', f"{ds.n_idp:.3f}") + # + + if with_paths: + out.append(' ') + out.append(header % 'Paths') for label, path in ds.paths.items(): out.append('%s\n' % path.report()) out.append('='*len(topline)) diff --git a/larch/xafs/feffrunner.py b/larch/xafs/feffrunner.py index b96a350a8..c8aad9f45 100644 --- a/larch/xafs/feffrunner.py +++ 
b/larch/xafs/feffrunner.py @@ -197,7 +197,7 @@ def write(msg): self.message_writer(line) ## snarf threshold energy - pattern = re.compile('mu_(new|old)=\s+(-?\d\.\d+)') + pattern = re.compile(r'mu_(new|old)=\s+(-?\d\.\d+)') match = pattern.search(line) if match is not None: self.threshold.append(match.group(2)) diff --git a/larch/xafs/pre_edge.py b/larch/xafs/pre_edge.py index 4f3668c13..ddc639eb7 100644 --- a/larch/xafs/pre_edge.py +++ b/larch/xafs/pre_edge.py @@ -17,11 +17,9 @@ @Make_CallArgs(["energy","mu"]) def find_e0(energy, mu=None, group=None, _larch=None): - """calculate :math:`E_0`, the energy threshold of absorption, or - 'edge energy', given :math:`\mu(E)`. + """calculate E0, the energy threshold of absorption, or 'edge energy', given mu(E). - :math:`E_0` is found as the point with maximum derivative with - some checks to avoid spurious glitches. + E0 is found as the point with maximum derivative with some checks to avoid spurious glitches. Arguments: energy (ndarray or group): array of x-ray energies, in eV, or group diff --git a/larch/xrd/cif2feff.py b/larch/xrd/cif2feff.py index 3519b675d..5f37c8df1 100644 --- a/larch/xrd/cif2feff.py +++ b/larch/xrd/cif2feff.py @@ -1,13 +1,14 @@ import os -import random +from random import Random from io import StringIO - from xraydb import atomic_symbol, atomic_number, xray_edge from larch.utils import fix_varname, strict_ascii, gformat from .amcsd_utils import PMG_CIF_OPTS, CifParser, Molecule, SpacegroupAnalyzer +rng = Random() + def get_atom_map(structure): """generalization of pymatgen atom map Returns: @@ -88,7 +89,8 @@ def cif_sites(ciftext, absorber=None): def cif2feffinp(ciftext, absorber, edge=None, cluster_size=8.0, absorber_site=1, - site_index=None, extra_titles=None, with_h=False, version8=True): + site_index=None, extra_titles=None, with_h=False, + version8=True, rng_seed=None): """convert CIF text to Feff8 or Feff6l input file Arguments @@ -103,6 +105,7 @@ def cif2feffinp(ciftext, absorber, edge=None, 
cluster_size=8.0, absorber_site=1, extra_titles (list of str or None): extra title lines to include [None] with_h (bool): whether to include H atoms [False] version8 (bool): whether to write Feff8l input (see Note 5)[True] + rng_seed (int or None): seed for RNG to get reproducible occupancy selections [None] Returns ------- text of Feff input file @@ -129,6 +132,10 @@ def cif2feffinp(ciftext, absorber, edge=None, cluster_size=8.0, absorber_site=1, except ValueError: return '# could not read CIF file' + global rng + if rng_seed is not None: + rng.seed(rng_seed) + sgroup = SpacegroupAnalyzer(cstruct).get_symmetry_dataset() space_group = sgroup["international"] @@ -156,7 +163,6 @@ def cif2feffinp(ciftext, absorber, edge=None, cluster_size=8.0, absorber_site=1, atlist = ', '.join(atoms_map.keys()) raise ValueError(f'atomic symbol {absorber:s} not listed in CIF data: ({atlist})') - site_atoms = {} # map xtal site with list of atoms occupying that site site_tags = {} absorber_count = 0 @@ -165,7 +171,7 @@ def cif2feffinp(ciftext, absorber, edge=None, cluster_size=8.0, absorber_site=1, if len(site_species) > 1: s_els = [s.symbol for s in site.species.keys()] s_wts = [s for s in site.species.values()] - site_atoms[sindex] = random.choices(s_els, weights=s_wts, k=1000) + site_atoms[sindex] = rng.choices(s_els, weights=s_wts, k=1000) site_tags[sindex] = f'({site.species_string:s})_{1+sindex:d}' else: site_atoms[sindex] = [site_species[0]] * 1000 diff --git a/larch/xrd/structure2feff.py b/larch/xrd/structure2feff.py index 282beca7c..93bab183b 100644 --- a/larch/xrd/structure2feff.py +++ b/larch/xrd/structure2feff.py @@ -1,11 +1,12 @@ import os -import random - -from .amcsd_utils import (SpacegroupAnalyzer, Molecule, IMolecule, IStructure) +from random import Random from xraydb import atomic_symbol, atomic_number, xray_edge from larch.utils.strutils import fix_varname, strict_ascii +from .amcsd_utils import (SpacegroupAnalyzer, Molecule, IMolecule, IStructure) + +rng = 
Random() def get_atom_map(structure): """generalization of pymatgen atom map @@ -98,8 +99,9 @@ def parse_structure(structure_text, fmt='cif', fname="default.filename"): return {'formula': struct.composition.reduced_formula, 'sites': struct.sites, 'structure_text': structure_text, 'fmt': fmt, 'fname': fname} -def structure2feffinp(structure_text, absorber, edge=None, cluster_size=8.0, absorber_site=1, - site_index=None, extra_titles=None, with_h=False, version8=True, fmt='cif'): +def structure2feffinp(structure_text, absorber, edge=None, cluster_size=8.0, + absorber_site=1, site_index=None, extra_titles=None, + with_h=False, version8=True, fmt='cif', rng_seed=None): """convert structure text to Feff8 or Feff6l input file Arguments @@ -115,6 +117,7 @@ def structure2feffinp(structure_text, absorber, edge=None, cluster_size=8.0, abs with_h (bool): whether to include H atoms [False] version8 (bool): whether to write Feff8l input (see Note 5)[True] fmt (string): format of structure file (cif, poscar, etc) [cif] + rng_seed (int or None): seed for RNG to get reproducible occupancy selections [None] Returns ------- text of Feff input file @@ -141,6 +144,10 @@ def structure2feffinp(structure_text, absorber, edge=None, cluster_size=8.0, abs except ValueError: return '# could not read structure file' + global rng + if rng_seed is not None: + rng.seed(rng_seed) + is_molecule = False if isinstance(struct, IStructure): @@ -184,7 +191,7 @@ def structure2feffinp(structure_text, absorber, edge=None, cluster_size=8.0, abs if len(site_species) > 1: s_els = [s.symbol for s in site.species.keys()] s_wts = [s for s in site.species.values()] - site_atoms[sindex] = random.choices(s_els, weights=s_wts, k=1000) + site_atoms[sindex] = rng.choices(s_els, weights=s_wts, k=1000) site_tags[sindex] = f'({site.species_string:s})_{1+sindex:d}' else: site_atoms[sindex] = [site_species[0]] * 1000