From bd14581969c0a05d50b2ded9d06ab02f2e2db1ba Mon Sep 17 00:00:00 2001 From: jennmald Date: Tue, 14 Apr 2026 15:19:11 -0400 Subject: [PATCH 1/7] adding a pixi environment --- .gitattributes | 2 + .gitignore | 3 + README.md | 6 + pixi.lock | 11759 +++++++++++++++++++++++++++++++++++++++++++++++ pixi.toml | 52 + pyproject.toml | 2 +- 6 files changed, 11823 insertions(+), 1 deletion(-) create mode 100644 pixi.lock create mode 100644 pixi.toml diff --git a/.gitattributes b/.gitattributes index 9ddddbc..afc6680 100644 --- a/.gitattributes +++ b/.gitattributes @@ -1 +1,3 @@ pyCHX/_version.py export-subst +# SCM syntax highlighting & preventing 3-way merges +pixi.lock merge=binary linguist-language=YAML linguist-generated=true diff --git a/.gitignore b/.gitignore index aac0eaf..93b82de 100755 --- a/.gitignore +++ b/.gitignore @@ -99,3 +99,6 @@ __enamlcache__/ # PyBuilder target/ +# pixi environments +.pixi/* +!.pixi/config.toml diff --git a/README.md b/README.md index dac7075..f41cbac 100644 --- a/README.md +++ b/README.md @@ -12,4 +12,10 @@ conda create --name pyCHX python=3.6 numpy scipy matplotlib source activate pyCHX pip install -r https://raw.githubusercontent.com/NSLS-II-CHX/pyCHX/master/requirements.txt pip install git+https://github.com/NSLS-II-CHX/pyCHX + + +Pixi Install: +- pixi install: installs only main dependencies +- pixi install --with dev: installs main and dev dependencies +- pixi run --with dev pytest: runs pytest with dev tools available ``` diff --git a/pixi.lock b/pixi.lock new file mode 100644 index 0000000..4f9dd02 --- /dev/null +++ b/pixi.lock @@ -0,0 +1,11759 @@ +version: 6 +environments: + default: + channels: + - url: https://conda.anaconda.org/conda-forge/ + indexes: + - https://pypi.org/simple + packages: + linux-64: + - conda: https://conda.anaconda.org/conda-forge/linux-64/_openmp_mutex-4.5-20_gnu.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/_x86_64-microarch-level-1-3_x86_64.conda + - conda: 
https://conda.anaconda.org/conda-forge/linux-64/adbc-driver-manager-1.11.0-py310hea6c23e_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/adbc-driver-postgresql-1.8.0-pyha770c72_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/adbc-driver-sqlite-1.11.0-pyha770c72_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/aiofiles-25.1.0-pyhcf101f3_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/aiosqlite-0.22.1-pyhcf101f3_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/alembic-1.18.4-pyhcf101f3_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/alsa-lib-1.2.15.3-hb03c661_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/annotated-doc-0.0.4-pyhcf101f3_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/annotated-types-0.7.0-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/anyio-4.13.0-pyhcf101f3_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/aom-3.9.1-hac33072_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/appdirs-1.4.4-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/area-detector-handlers-0.0.10-pyhd8ed1ab_0.tar.bz2 + - conda: https://conda.anaconda.org/conda-forge/noarch/argon2-cffi-25.1.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/argon2-cffi-bindings-25.1.0-py310h7c4b9e2_2.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/asciitree-0.3.3-py_2.tar.bz2 + - conda: https://conda.anaconda.org/conda-forge/noarch/asgi-correlation-id-4.3.3-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/asteval-1.0.8-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/astropy-6.1.7-py310hf462985_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/astropy-base-6.1.7-he5c6ecd_0.conda + - conda: 
https://conda.anaconda.org/conda-forge/noarch/astropy-iers-data-0.2026.4.13.0.58.2-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/asttokens-3.0.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/async-timeout-5.0.1-pyhcf101f3_2.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/asyncpg-0.31.0-py310h139afa4_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/attrs-26.1.0-pyhcf101f3_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/awkward-2.9.0-pyhcf101f3_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/awkward-cpp-52-py310h7f712d7_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-c-auth-0.9.1-h194c533_5.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-c-cal-0.9.8-h346e085_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-c-common-0.12.5-hb03c661_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-c-compression-0.3.1-h7e655bb_8.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-c-event-stream-0.5.6-h1deb5b9_4.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-c-http-0.10.7-had4b759_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-c-io-0.23.2-hbff472d_2.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-c-mqtt-0.13.3-h8ba2272_8.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-c-s3-0.8.6-h493c25d_7.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-c-sdkutils-0.2.4-h7e655bb_3.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-checksums-0.2.7-h7e655bb_4.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-crt-cpp-0.35.0-h719b17a_2.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-sdk-cpp-1.11.606-h522d481_6.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/azure-core-cpp-1.16.1-h3a458e0_0.conda + 
- conda: https://conda.anaconda.org/conda-forge/linux-64/azure-identity-cpp-1.13.2-h3a5f585_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/azure-storage-blobs-cpp-12.15.0-h2a74896_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/azure-storage-common-cpp-12.11.0-h3d7a050_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/azure-storage-files-datalake-cpp-12.13.0-hf38f1be_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/blosc-1.21.6-he440d0b_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/bluesky-tiled-plugins-2.0.2-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/bokeh-3.9.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/boltons-25.0.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/brotli-1.1.0-hb03c661_4.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/brotli-bin-1.1.0-hb03c661_4.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/brotli-python-1.1.0-py310hea6c23e_4.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/brunsli-0.1-he3183e4_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/bzip2-1.0.8-hda65f42_9.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/c-ares-1.34.6-hb03c661_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/c-blosc2-2.15.2-h3122c55_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/ca-certificates-2026.2.25-hbd8a1cb_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/cached-property-1.5.2-hd8ed1ab_1.tar.bz2 + - conda: https://conda.anaconda.org/conda-forge/noarch/cached_property-1.5.2-pyha770c72_1.tar.bz2 + - conda: https://conda.anaconda.org/conda-forge/noarch/cachetools-7.0.5-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/cairo-1.18.4-h3394656_0.conda + - conda: 
https://conda.anaconda.org/conda-forge/noarch/canonicaljson-2.0.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/certifi-2026.2.25-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/cffi-2.0.0-py310he7384ee_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/charls-2.4.3-hecca717_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-3.4.7-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/click-8.3.2-pyhc90fa1f_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/cloudpickle-3.1.2-pyhcf101f3_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/contourpy-1.3.2-py310h3788b33_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/cryptography-46.0.7-py310hb288b08_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/cycler-0.12.1-pyhcf101f3_2.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/cyrus-sasl-2.1.28-hd9c7081_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/cython-3.2.4-py310ha58568a_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/cytoolz-1.1.0-py310h7c4b9e2_2.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/dask-2026.3.0-pyhc364b38_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/dask-core-2026.3.0-pyhc364b38_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/databroker-2.0.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/dav1d-1.2.1-hd590300_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/dbus-1.16.2-h24cb091_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/decorator-5.2.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/dill-0.4.1-pyhcf101f3_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/distributed-2026.3.0-pyhc364b38_0.conda + - conda: 
https://conda.anaconda.org/conda-forge/noarch/dnspython-2.8.0-pyhcf101f3_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/doct-1.1.0-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/double-conversion-3.3.1-h5888daf_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/ecdsa-0.19.2-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/echo-0.15.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/email-validator-2.3.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/email_validator-2.3.0-hd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/entrypoints-0.4-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/et_xmlfile-2.0.0-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/event-model-1.23.1-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/exceptiongroup-1.3.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/executing-2.2.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/fast-histogram-0.14-py310hf779ad0_4.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/fastapi-0.135.3-hbd727af_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/fastapi-cli-0.0.23-pyhcf101f3_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/fastapi-core-0.135.3-pyhcf101f3_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/fasteners-0.19-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/font-ttf-dejavu-sans-mono-2.37-hab24e00_0.tar.bz2 + - conda: https://conda.anaconda.org/conda-forge/noarch/font-ttf-inconsolata-3.000-h77eed37_0.tar.bz2 + - conda: https://conda.anaconda.org/conda-forge/noarch/font-ttf-source-code-pro-2.038-h77eed37_0.tar.bz2 + - conda: https://conda.anaconda.org/conda-forge/noarch/font-ttf-ubuntu-0.83-h77eed37_3.conda + - 
conda: https://conda.anaconda.org/conda-forge/linux-64/fontconfig-2.17.1-h27c8c51_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/fonts-conda-ecosystem-1-0.tar.bz2 + - conda: https://conda.anaconda.org/conda-forge/noarch/fonts-conda-forge-1-hc364b38_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.62.0-py310h3406613_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/freetype-2.14.3-ha770c72_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/freetype-py-2.3.0-pyhd8ed1ab_0.tar.bz2 + - conda: https://conda.anaconda.org/conda-forge/noarch/fsspec-2026.3.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/future-1.0.0-pyhd8ed1ab_2.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/geos-3.14.1-h480dda7_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/gflags-2.2.2-h5888daf_1005.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/giflib-5.2.2-hd590300_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/glog-0.7.1-hbabe93e_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/glue-core-1.25.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/gmp-6.3.0-hac33072_2.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/gmpy2-2.3.0-py310h63ebcad_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/graphite2-1.3.14-hecca717_2.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/greenlet-3.4.0-py310h25320af_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/h11-0.16.0-pyhcf101f3_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/h2-4.3.0-pyhcf101f3_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/h5netcdf-1.8.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/h5py-3.16.0-nompi_py310h4aa865e_102.conda + - conda: 
https://conda.anaconda.org/conda-forge/linux-64/harfbuzz-12.2.0-h15599e2_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/hdf5-1.14.6-nompi_h19486de_106.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/hdf5plugin-5.1.0-py310h887a449_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/historydict-1.2.6-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/hpack-4.1.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/httpcore-1.0.9-pyh29332c3_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/httptools-0.7.1-py310h7c4b9e2_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/httpx-0.28.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/humanize-4.15.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/hyperframe-6.1.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/icu-75.1-he02047a_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/idna-3.11-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/imagecodecs-2024.12.30-py310h78a9a29_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/imageio-2.37.0-pyhfb79c49_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-8.8.0-pyhcf101f3_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/importlib-resources-7.1.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/importlib_metadata-8.8.0-h8f7a5dd_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/importlib_resources-7.1.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/ipython-8.37.0-pyh8f84b5b_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/jedi-0.19.2-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/jinja2-3.1.6-pyhcf101f3_1.conda + - conda: 
https://conda.anaconda.org/conda-forge/noarch/jmespath-1.1.0-pyhcf101f3_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/json-merge-patch-0.2-pyhd8ed1ab_2.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/jsonpatch-1.33-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/jsonpointer-3.1.1-pyhcf101f3_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/jsonschema-4.26.0-pyhcf101f3_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/jsonschema-specifications-2025.9.1-pyhcf101f3_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/jxrlib-1.1-hd590300_3.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/keyutils-1.6.3-hb9d3cd8_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/kiwisolver-1.5.0-py310haaf941d_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/krb5-1.21.3-h659f571_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/lazy-loader-0.5-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/lcms2-2.17-h717163a_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/ld_impl_linux-64-2.45.1-default_hbd61a6d_102.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/lerc-4.1.0-hdb68285_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libabseil-20250512.1-cxx17_hba17884_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libadbc-driver-postgresql-1.8.0-h6eab0cb_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libadbc-driver-sqlite-1.11.0-hcea63bf_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libaec-1.1.5-h088129d_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libarrow-22.0.0-h91d8edf_1_cpu.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libarrow-acero-22.0.0-h635bf11_1_cpu.conda + - conda: 
https://conda.anaconda.org/conda-forge/linux-64/libarrow-compute-22.0.0-h8c2c5c3_1_cpu.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libarrow-dataset-22.0.0-h635bf11_1_cpu.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libarrow-substrait-22.0.0-h3f74fd7_1_cpu.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libavif16-1.4.1-hcfa2d63_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libblas-3.11.0-6_h4a7cf45_openblas.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libbrotlicommon-1.1.0-hb03c661_4.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libbrotlidec-1.1.0-hb03c661_4.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libbrotlienc-1.1.0-hb03c661_4.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libcblas-3.11.0-6_h0358290_openblas.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libclang-cpp21.1-21.1.8-default_h99862b1_3.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libclang13-22.1.3-default_h746c552_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libcrc32c-1.1.2-h9c3ff4c_0.tar.bz2 + - conda: https://conda.anaconda.org/conda-forge/linux-64/libcups-2.3.3-hb8b1518_5.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libcurl-8.18.0-h4e3cde8_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libdeflate-1.23-h86f0d12_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libdrm-2.4.125-hb03c661_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libedit-3.1.20250104-pl5321h7949ede_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libegl-1.7.0-ha4b6fd6_2.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libev-4.33-hd590300_2.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libevent-2.1.12-hf998b51_1.conda + - conda: 
https://conda.anaconda.org/conda-forge/linux-64/libexpat-2.7.5-hecca717_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libffi-3.5.2-h3435931_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libfreetype-2.14.3-ha770c72_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libfreetype6-2.14.3-h73754d4_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libgcc-15.2.0-he0feb66_18.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-15.2.0-h69a702a_18.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libgfortran-15.2.0-h69a702a_18.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libgfortran5-15.2.0-h68bc16d_18.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libgl-1.7.0-ha4b6fd6_2.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libglib-2.86.2-h32235b2_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libglvnd-1.7.0-ha4b6fd6_2.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libglx-1.7.0-ha4b6fd6_2.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libgomp-15.2.0-he0feb66_18.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libgoogle-cloud-2.39.0-hdb79228_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libgoogle-cloud-storage-2.39.0-hdbdcf42_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libgrpc-1.73.1-h3288cfb_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libhwy-1.3.0-h4c17acf_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libiconv-1.18-h3b78370_2.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libjpeg-turbo-3.1.4.1-hb03c661_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libjxl-0.11.1-h6cb5226_4.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/liblapack-3.11.0-6_h47877c9_openblas.conda + - conda: 
https://conda.anaconda.org/conda-forge/linux-64/libllvm21-21.1.8-hf7376ad_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libllvm22-22.1.3-hf7376ad_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/liblzma-5.8.3-hb03c661_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libnghttp2-1.68.1-h877daf1_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libnsl-2.0.1-hb9d3cd8_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libntlm-1.8-hb9d3cd8_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libopenblas-0.3.32-pthreads_h94d23a6_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libopengl-1.7.0-ha4b6fd6_2.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libopentelemetry-cpp-1.21.0-hb9b0907_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libopentelemetry-cpp-headers-1.21.0-ha770c72_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libparquet-22.0.0-h7376487_1_cpu.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libpciaccess-0.18-hb9d3cd8_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libpng-1.6.57-h421ea60_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libpq-17.7-h5c52fec_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libprotobuf-6.31.1-h49aed37_4.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libre2-11-2025.11.05-h7b12aa8_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.53.0-h0c1763c_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libssh2-1.11.1-hcf80075_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-15.2.0-h934c35e_18.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-15.2.0-hdf11a46_18.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libthrift-0.22.0-h454ac66_1.conda + - conda: 
https://conda.anaconda.org/conda-forge/linux-64/libtiff-4.7.0-hd9ff511_4.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libutf8proc-2.11.3-hfe17d71_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libuuid-2.42-h5347b49_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libuv-1.51.0-hb03c661_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libvulkan-loader-1.4.341.0-h5279c79_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libwebp-base-1.6.0-hd42ef1d_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libxcb-1.17.0-h8a09558_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libxcrypt-4.4.36-hd590300_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libxkbcommon-1.13.1-hca5e8e5_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libxml2-16-2.15.1-ha9997c6_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libxml2-2.15.1-h26afc86_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libxslt-1.1.43-h711ed8c_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libzlib-1.3.2-h25fd6f3_2.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libzopfli-1.0.3-h9c3ff4c_0.tar.bz2 + - conda: https://conda.anaconda.org/conda-forge/linux-64/llvmlite-0.47.0-py310hee1c697_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/lmfit-1.3.4-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/locket-1.0.0-pyhd8ed1ab_0.tar.bz2 + - conda: https://conda.anaconda.org/conda-forge/linux-64/lz4-4.4.5-py310hde1b0b5_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/lz4-c-1.10.0-h5888daf_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/mako-1.3.10-pyhcf101f3_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/markdown-it-py-4.0.0-pyhd8ed1ab_0.conda + - conda: 
https://conda.anaconda.org/conda-forge/linux-64/markupsafe-3.0.3-py310h3406613_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/matplotlib-3.10.8-py310hff52083_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/matplotlib-base-3.10.8-py310hfde16b3_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/matplotlib-inline-0.2.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/mdurl-0.1.2-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/minio-7.2.20-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/mongomock-4.3.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/mongoquery-1.4.3-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/mpc-1.4.0-he0a73b1_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/mpfr-4.2.2-he0a73b1_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/mpl-scatter-density-0.8-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/msgpack-python-1.1.2-py310h03d9f68_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/munkres-1.1.4-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/narwhals-2.19.0-pyhcf101f3_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/ncurses-6.5-h2d0b736_3.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/ndindex-1.10.1-py310hea6c23e_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/networkx-3.4.2-pyh267e887_2.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/nlohmann_json-3.12.0-h54a6638_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/nomkl-1.0-h5ca1d4c_0.tar.bz2 + - conda: https://conda.anaconda.org/conda-forge/linux-64/numba-0.65.0-py310h225f558_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/numcodecs-0.13.1-py310h5eaa309_0.conda + - conda: 
https://conda.anaconda.org/conda-forge/linux-64/numexpr-2.14.1-py310h34a7263_101.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/numpy-2.2.6-py310hefbff90_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/obstore-0.9.2-py310hdfeec95_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/openjpeg-2.5.3-h55fea9a_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/openldap-2.6.10-he970967_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/openpyxl-3.1.5-py310h05b0c27_3.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/openssl-3.6.2-h35e630c_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/orc-2.2.1-hd747db4_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/orjson-3.11.8-py310hfe99b16_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/packaging-26.0-pyhcf101f3_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/pandas-2.3.3-py310h0158d43_2.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/parso-0.8.6-pyhcf101f3_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/partd-1.4.2-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/pcre2-10.46-h1321c63_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pexpect-4.9.0-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pickleshare-0.7.5-pyhd8ed1ab_1004.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/pillow-11.3.0-py310h6557065_3.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pims-0.7-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pip-26.0.1-pyh8b19718_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/pixman-0.46.4-h54a6638_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/platformdirs-4.9.6-pyhcf101f3_0.conda + - conda: 
https://conda.anaconda.org/conda-forge/noarch/prettytable-3.17.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/prometheus-cpp-1.3.0-ha5d0236_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/prometheus_client-0.25.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/prompt-toolkit-3.0.52-pyha770c72_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/psutil-7.2.2-py310h139afa4_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/pthread-stubs-0.4-hb9d3cd8_1002.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/ptyprocess-0.7.0-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pure_eval-0.2.3-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/py-cpuinfo-9.0.0-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/pyarrow-22.0.0-py310hff52083_2.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/pyarrow-core-22.0.0-py310h923f568_2_cpu.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pyasn1-0.6.3-pyhcf101f3_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/pycairo-1.29.0-py310h8c3e0f7_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pycparser-2.22-pyh29332c3_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/pycryptodome-3.23.0-py310he45356f_2.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pydantic-2.13.0-pyhcf101f3_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/pydantic-core-2.46.0-py310hd8f68c5_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pydantic-extra-types-2.11.2-pyhcf101f3_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pydantic-settings-2.13.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/pyerfa-2.0.1.5-py310h32771cd_2.conda + - conda: 
https://conda.anaconda.org/conda-forge/noarch/pygments-2.20.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/pymongo-4.16.0-py310hea6c23e_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pyparsing-3.3.2-pyhcf101f3_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/pyside6-6.9.2-py310h2007e60_2.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pysocks-1.7.1-pyha55dd90_7.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/python-3.10.20-h3c07f61_0_cpython.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/python-blosc2-2.7.1-py310h8713f2e_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.9.0.post0-pyhe01879c_2.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/python-dotenv-1.2.2-pyhcf101f3_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/python-duckdb-1.3.2-py310hea6c23e_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/python-jose-3.5.0-pyhff2d567_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/python-multipart-0.0.26-pyhcf101f3_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/python-tzdata-2026.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/python_abi-3.10-8_cp310.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pytz-2026.1.post1-pyhcf101f3_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/pywavelets-1.8.0-py310hf462985_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/pyyaml-6.0.3-py310h3406613_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/qhull-2020.2-h434a139_5.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/qt6-main-6.9.2-h994258b_2.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/rav1e-0.8.1-h1fbca29_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/re2-2025.11.05-h5301d42_0.conda + - conda: 
https://conda.anaconda.org/conda-forge/linux-64/readline-8.3-h853b02a_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/redis-py-7.4.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/referencing-0.37.0-pyhcf101f3_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/reportlab-4.4.10-pyhcf101f3_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/requests-2.33.1-pyhcf101f3_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/rich-15.0.0-pyhcf101f3_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/rich-toolkit-0.19.7-pyhcf101f3_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/rlpycairo-0.4.0-pyh6c17108_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/rpds-py-0.30.0-py310hd8f68c5_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/rsa-4.9.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/s2n-1.6.0-h8399546_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/scikit-image-0.25.2-py310h0158d43_2.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/scipy-1.15.2-py310h1d65ade_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/sentinels-1.0.0-py_1.tar.bz2 + - conda: https://conda.anaconda.org/conda-forge/noarch/setuptools-82.0.1-pyh332efcf_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/setuptools-scm-9.2.2-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/shapely-2.1.2-py310hc8bbb35_2.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/shellingham-1.5.4-pyhd8ed1ab_2.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/six-1.17.0-pyhe01879c_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/slicerator-1.1.0-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/snappy-1.2.2-h03e3b7b_1.conda + - conda: 
https://conda.anaconda.org/conda-forge/noarch/sniffio-1.3.1-pyhd8ed1ab_2.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/sortedcontainers-2.4.0-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/sparse-0.17.0-pyhcf101f3_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/sqlalchemy-2.0.49-py310h139afa4_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/stack_data-0.6.3-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/stamina-26.1.0-pyhcf101f3_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/starlette-1.0.0-pyhcf101f3_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/suitcase-mongo-0.7.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/svt-av1-4.0.1-hecca717_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/tblib-3.2.2-pyhcf101f3_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/tenacity-9.1.4-pyhcf101f3_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/tifffile-2025.5.10-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/tiled-0.2.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/tiled-base-0.2.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/tiled-client-0.2.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/tiled-formats-0.2.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/tiled-server-0.2.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/tk-8.6.13-noxft_h366c992_103.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/tomli-2.4.1-pyhcf101f3_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/toolz-1.1.0-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/tornado-6.5.5-py310h7c4b9e2_0.conda + - conda: 
https://conda.anaconda.org/conda-forge/noarch/tqdm-4.67.3-pyh8f84b5b_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/traitlets-5.14.3-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/typer-0.24.1-pyhcf101f3_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.15.0-h396c80c_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/typing-inspection-0.4.2-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.15.0-pyhcf101f3_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/tzdata-2025c-hc9c84f9_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/tzlocal-5.3.1-pyh8f84b5b_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/uncertainties-3.2.3-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/unicodedata2-17.0.1-py310h7c4b9e2_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/urllib3-2.5.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/uvicorn-0.44.0-pyhc90fa1f_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/uvicorn-standard-0.44.0-h4457471_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/uvloop-0.22.1-py310h7c4b9e2_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/watchfiles-1.1.1-py310hdfeec95_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/wayland-1.25.0-hd6090a7_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/wcwidth-0.6.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/websockets-16.0-py310h139afa4_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/wheel-0.46.3-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/xarray-2025.6.1-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/xcb-util-0.4.1-h4f16b4b_2.conda + - conda: 
https://conda.anaconda.org/conda-forge/linux-64/xcb-util-cursor-0.1.6-hb03c661_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/xcb-util-image-0.4.0-hb711507_2.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/xcb-util-keysyms-0.4.1-hb711507_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/xcb-util-renderutil-0.3.10-hb711507_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/xcb-util-wm-0.4.2-hb711507_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/xkeyboard-config-2.47-hb03c661_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/xlrd-2.0.2-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-libice-1.1.2-hb9d3cd8_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-libsm-1.2.6-he73a12e_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-libx11-1.8.13-he1eb515_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxau-1.0.12-hb03c661_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxcomposite-0.4.7-hb03c661_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxcursor-1.2.3-hb9d3cd8_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxdamage-1.1.6-hb9d3cd8_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxdmcp-1.1.5-hb03c661_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxext-1.3.7-hb03c661_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxfixes-6.0.2-hb03c661_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxi-1.8.2-hb9d3cd8_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxrandr-1.5.5-hb03c661_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxrender-0.9.12-hb9d3cd8_0.conda + - conda: 
https://conda.anaconda.org/conda-forge/linux-64/xorg-libxtst-1.2.5-hb9d3cd8_3.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxxf86vm-1.1.7-hb03c661_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/xyzservices-2026.3.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/yaml-0.2.5-h280c20c_3.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/zarr-2.18.3-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/zfp-1.0.1-h909a3a2_5.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/zict-3.0.0-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/zipp-3.23.1-pyhcf101f3_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/zlib-1.3.2-h25fd6f3_2.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/zlib-ng-2.2.5-hde8ca8f_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/zstandard-0.25.0-py310h139afa4_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/zstd-1.5.7-hb78ec9c_6.conda + - pypi: git+https://github.com/NSLS2/chxtools?rev=main#04a3a27ceefa2436839e00f980be3f65aa4155e5 + - pypi: git+https://github.com/NSLS-II-CHX/eiger-io?rev=master#cb13bdc336e445697e6483556116aaba0368a5d3 + - pypi: https://files.pythonhosted.org/packages/fd/15/a70400eeea394dbcf34f926713499c3f7334b2ee0ec3639b794491ffc9c0/fabio-2025.10.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/ef/4a/ac0f195f52fae450338cae90234588a2ead2337440b4e5ff7230775477a3/lxml-6.0.4-cp310-cp310-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl + - pypi: git+https://github.com/ChrisBeaumont/mpl-modest-image?rev=master#4174514a9ce7f4160fb6cbd200df6897694e0ac3 + - pypi: https://files.pythonhosted.org/packages/82/83/7dafb09fbc3efe9d00c4667d22b32b53d08e8a676fa164c6dd8f5debe85e/pyepics-3.5.9-py3-none-any.whl + - pypi: 
https://files.pythonhosted.org/packages/a0/e3/913ca30973886fa9ef5f1a78cf6a5a206ee76d8da2033ad67c4b8beec6b2/pyfai-2026.3.0-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/b4/8c/4065950f9d013c4b2e588fe33cf04e564c2322842d84dbcbce5ba1dc28b0/PyQt5-5.15.11-cp38-abi3-manylinux_2_17_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/9a/46/ffe177f99f897a59dc237a20059020427bd2d3853d713992b8081933ddfe/pyqt5_qt5-5.15.18-py3-none-manylinux2014_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/6c/4a/c66dfd090d93ef6f3e30093b55bf786c24608258d157cb1951e28c5a2725/pyqt5_sip-12.18.0-cp310-cp310-manylinux1_x86_64.manylinux_2_5_x86_64.whl + - pypi: git+https://github.com/scikit-beam/scikit-beam?rev=main#dbe344435f6b12749104b868f7e251624acee565 + - pypi: https://files.pythonhosted.org/packages/65/8a/2b46cb76762468deea3dbcc5370c858d60e5b7bdaf09bdccd0169707147c/silx-2.2.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl + - pypi: git+https://github.com/Nikea/xray-vision?rev=master#fc01c7bf7ca25fc2e824ed6f16481b4fa78a79a6 + osx-arm64: + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/_openmp_mutex-4.5-7_kmp_llvm.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/aiofiles-25.1.0-pyhcf101f3_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/alembic-1.18.4-pyhcf101f3_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/annotated-doc-0.0.4-pyhcf101f3_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/annotated-types-0.7.0-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/anyio-4.13.0-pyhcf101f3_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/aom-3.9.1-h7bae524_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/appdirs-1.4.4-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/area-detector-handlers-0.0.10-pyhd8ed1ab_0.tar.bz2 + - conda: 
https://conda.anaconda.org/conda-forge/noarch/asgi-correlation-id-4.3.3-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/asteval-1.0.8-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/asttokens-3.0.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/attrs-26.1.0-pyhcf101f3_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/aws-c-auth-0.9.1-h753d554_5.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/aws-c-cal-0.9.8-hca30140_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/aws-c-common-0.12.5-hc919400_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/aws-c-compression-0.3.1-h61d5560_8.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/aws-c-event-stream-0.5.6-hada8b3e_4.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/aws-c-http-0.10.7-h241dc44_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/aws-c-io-0.23.2-hcea795d_2.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/aws-c-mqtt-0.13.3-hf26a141_8.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/aws-c-s3-0.8.6-h1e3b5a0_7.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/aws-c-sdkutils-0.2.4-h61d5560_3.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/aws-checksums-0.2.7-h61d5560_4.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/aws-crt-cpp-0.35.0-h44e95eb_2.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/aws-sdk-cpp-1.11.606-hf3ce6b4_6.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/azure-core-cpp-1.16.1-h88fedcc_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/azure-identity-cpp-1.13.2-h853621b_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/azure-storage-blobs-cpp-12.15.0-h10d327b_1.conda + - conda: 
https://conda.anaconda.org/conda-forge/osx-arm64/azure-storage-common-cpp-12.11.0-h7e4aa5d_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/azure-storage-files-datalake-cpp-12.13.0-hb288d13_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/blosc-1.21.6-h7dd00d9_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/bokeh-3.9.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/boltons-25.0.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/brotli-1.1.0-h6caf38d_4.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/brotli-bin-1.1.0-h6caf38d_4.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/brotli-python-1.1.0-py310h1af2607_4.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/brunsli-0.1-h97083b6_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/bzip2-1.0.8-hd037594_9.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/c-ares-1.34.6-hc919400_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/c-blosc2-2.19.1-h9c47b6e_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/ca-certificates-2026.2.25-hbd8a1cb_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/cached-property-1.5.2-hd8ed1ab_1.tar.bz2 + - conda: https://conda.anaconda.org/conda-forge/noarch/cached_property-1.5.2-pyha770c72_1.tar.bz2 + - conda: https://conda.anaconda.org/conda-forge/noarch/cachetools-7.0.5-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/cachey-0.2.1-pyh9f0ad1d_0.tar.bz2 + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/cairo-1.18.4-he0f2337_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/certifi-2026.2.25-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/cffi-2.0.0-py310hf5b66c1_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/charls-2.4.3-hf6b4638_0.conda + - conda: 
https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-3.4.7-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/click-8.3.2-pyhc90fa1f_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/cloudpickle-3.1.2-pyhcf101f3_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/contourpy-1.3.2-py310h7f4e7e6_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/cryptography-46.0.7-py310h12cab78_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/cycler-0.12.1-pyhcf101f3_2.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/cython-3.2.4-py310h9a762d2_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/cytoolz-1.1.0-py310h72544b6_2.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/dask-2026.3.0-pyhc364b38_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/dask-core-2026.3.0-pyhc364b38_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/databroker-2.0.0b38-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/dav1d-1.2.1-hb547adb_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/decorator-5.2.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/dill-0.4.1-pyhcf101f3_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/distributed-2026.3.0-pyhc364b38_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/dnspython-2.8.0-pyhcf101f3_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/doct-1.1.0-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/ecdsa-0.19.2-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/email-validator-2.3.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/email_validator-2.3.0-hd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/entrypoints-0.4-pyhd8ed1ab_1.conda + - conda: 
https://conda.anaconda.org/conda-forge/noarch/et_xmlfile-2.0.0-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/event-model-1.23.1-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/exceptiongroup-1.3.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/executing-2.2.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/fastapi-0.135.3-hbd727af_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/fastapi-cli-0.0.23-pyhcf101f3_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/fastapi-core-0.135.3-pyhcf101f3_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/font-ttf-dejavu-sans-mono-2.37-hab24e00_0.tar.bz2 + - conda: https://conda.anaconda.org/conda-forge/noarch/font-ttf-inconsolata-3.000-h77eed37_0.tar.bz2 + - conda: https://conda.anaconda.org/conda-forge/noarch/font-ttf-source-code-pro-2.038-h77eed37_0.tar.bz2 + - conda: https://conda.anaconda.org/conda-forge/noarch/font-ttf-ubuntu-0.83-h77eed37_3.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/fontconfig-2.17.1-h2b252f5_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/fonts-conda-ecosystem-1-0.tar.bz2 + - conda: https://conda.anaconda.org/conda-forge/noarch/fonts-conda-forge-1-hc364b38_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/fonttools-4.62.0-py310hb46c203_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/freetype-2.14.3-hce30654_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/freetype-py-2.3.0-pyhd8ed1ab_0.tar.bz2 + - conda: https://conda.anaconda.org/conda-forge/noarch/fsspec-2026.3.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/future-1.0.0-pyhd8ed1ab_2.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/gflags-2.2.2-hf9b8971_1005.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/giflib-5.2.2-h93a5062_0.conda + - 
conda: https://conda.anaconda.org/conda-forge/osx-arm64/glog-0.7.1-heb240a5_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/gmp-6.3.0-h7bae524_2.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/gmpy2-2.3.0-py310h6ac7f53_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/greenlet-3.4.0-py310h19b6747_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/h11-0.16.0-pyhcf101f3_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/h2-4.3.0-pyhcf101f3_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/h5netcdf-1.8.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/h5py-3.16.0-nompi_py310h0c5f886_102.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/hdf5-1.14.6-nompi_had3affe_108.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/heapdict-1.0.1-pyhd8ed1ab_2.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/historydict-1.2.6-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/hpack-4.1.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/httpcore-1.0.9-pyh29332c3_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/httptools-0.7.1-py310hfe3a0ae_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/httpx-0.28.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/humanize-4.15.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/hyperframe-6.1.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/icu-78.3-hef89b57_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/idna-3.11-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/imagecodecs-2025.3.30-py310hc9b329b_2.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/imageio-2.37.0-pyhfb79c49_0.conda + - conda: 
https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-8.8.0-pyhcf101f3_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/importlib-resources-7.1.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/importlib_resources-7.1.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/ipython-8.37.0-pyh8f84b5b_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/jedi-0.19.2-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/jinja2-3.1.6-pyhcf101f3_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/jmespath-1.1.0-pyhcf101f3_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/jsonschema-4.26.0-pyhcf101f3_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/jsonschema-specifications-2025.9.1-pyhcf101f3_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/jxrlib-1.1-h93a5062_3.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/kiwisolver-1.5.0-py310h34990b0_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/krb5-1.22.2-h385eeb1_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/lazy-loader-0.5-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/lcms2-2.18-hdfa7624_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/lerc-4.1.0-h1eee2c3_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libabseil-20250512.1-cxx17_hd41c47c_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libaec-1.1.5-h8664d51_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libarrow-22.0.0-h7239961_1_cpu.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libarrow-acero-22.0.0-hc317990_1_cpu.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libarrow-compute-22.0.0-h75845d1_1_cpu.conda + - conda: 
https://conda.anaconda.org/conda-forge/osx-arm64/libarrow-dataset-22.0.0-hc317990_1_cpu.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libarrow-substrait-22.0.0-h144af7f_1_cpu.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libavif16-1.4.1-hfce71f6_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libblas-3.11.0-6_h51639a9_openblas.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libbrotlicommon-1.1.0-h6caf38d_4.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libbrotlidec-1.1.0-h6caf38d_4.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libbrotlienc-1.1.0-h6caf38d_4.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libcblas-3.11.0-6_hb0561ab_openblas.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libcrc32c-1.1.2-hbdafb3b_0.tar.bz2 + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libcurl-8.19.0-hd5a2499_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libcxx-22.1.3-h55c6f16_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libdeflate-1.24-h5773f1b_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libedit-3.1.20250104-pl5321hafb1f1b_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libev-4.33-h93a5062_2.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libevent-2.1.12-h2757513_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libexpat-2.7.5-hf6b4638_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libffi-3.5.2-hcf2aa1b_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libfreetype-2.14.3-hce30654_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libfreetype6-2.14.3-hdfa99f5_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libgcc-15.2.0-hcbb3090_18.conda + - conda: 
https://conda.anaconda.org/conda-forge/osx-arm64/libgfortran-15.2.0-h07b0088_18.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libgfortran5-15.2.0-hdae7583_18.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libglib-2.86.4-he378b5c_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libgoogle-cloud-2.39.0-head0a95_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libgoogle-cloud-storage-2.39.0-hfa3a374_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libgrpc-1.73.1-h3063b79_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libhwy-1.3.0-h48b13b8_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libiconv-1.18-h23cfdf5_2.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libintl-0.25.1-h493aca8_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libjpeg-turbo-3.1.4.1-h84a0fba_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libjxl-0.11.1-h7274d02_4.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/liblapack-3.11.0-6_hd9741b5_openblas.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/liblzma-5.8.3-h8088a28_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libnghttp2-1.68.1-h8f3e76b_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libopenblas-0.3.32-openmp_he657e61_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libopentelemetry-cpp-1.21.0-he15edb5_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libopentelemetry-cpp-headers-1.21.0-hce30654_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libparquet-22.0.0-h0ac143b_1_cpu.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libpng-1.6.57-h132b30e_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libprotobuf-6.31.1-h98f38fd_4.conda + - conda: 
https://conda.anaconda.org/conda-forge/osx-arm64/libre2-11-2025.11.05-h91c62da_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libsqlite-3.53.0-h1b79a29_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libssh2-1.11.1-h1590b86_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libthrift-0.22.0-h14a376c_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libtiff-4.7.1-h7dc4979_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libutf8proc-2.11.3-h2431656_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libuv-1.51.0-h6caf38d_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libwebp-base-1.6.0-h07db88b_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libxcb-1.17.0-hdb1d25a_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libxml2-16-2.15.2-h5ef1a60_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libxml2-2.15.2-h8d039ee_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libzlib-1.3.2-h8088a28_2.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libzopfli-1.0.3-h9f76cd9_0.tar.bz2 + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/llvm-openmp-22.1.3-hc7d1edf_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/llvmlite-0.47.0-py310h4137262_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/lmfit-1.3.4-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/locket-1.0.0-pyhd8ed1ab_0.tar.bz2 + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/lz4-4.4.5-py310h36fcd3f_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/lz4-c-1.10.0-h286801f_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/mako-1.3.10-pyhcf101f3_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/markdown-it-py-4.0.0-pyhd8ed1ab_0.conda + - conda: 
https://conda.anaconda.org/conda-forge/osx-arm64/markupsafe-3.0.3-py310hb46c203_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/matplotlib-3.10.8-py310hb6292c7_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/matplotlib-base-3.10.8-py310h0181960_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/matplotlib-inline-0.2.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/mdurl-0.1.2-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/mongomock-4.3.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/mongoquery-1.4.3-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/mpc-1.4.0-h169892a_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/mpfr-4.2.2-h6bc93b0_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/msgpack-python-1.1.2-py310h0e897d2_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/munkres-1.1.4-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/narwhals-2.19.0-pyhcf101f3_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/ncurses-6.5-h5e97a16_3.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/ndindex-1.8-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/networkx-3.4.2-pyh267e887_2.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/nlohmann_json-3.12.0-h784d473_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/numba-0.65.0-py310h71bca05_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/numpy-2.2.6-py310h4d83441_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/openjpeg-2.5.4-hd9e9057_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/openpyxl-3.1.5-py310hb1d31aa_3.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/openssl-3.6.2-hd24854e_0.conda + - conda: 
https://conda.anaconda.org/conda-forge/osx-arm64/orc-2.2.1-h4fd0076_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/orjson-3.11.8-py310h38230ac_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/packaging-26.0-pyhcf101f3_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/pandas-2.3.3-py310h25f4b65_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/parso-0.8.6-pyhcf101f3_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/partd-1.4.2-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/pcre2-10.47-h30297fc_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pexpect-4.9.0-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pickleshare-0.7.5-pyhd8ed1ab_1004.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/pillow-12.0.0-py310hcac772a_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pims-0.7-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pip-26.0.1-pyh8b19718_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/pixman-0.46.4-h81086ad_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/prettytable-3.17.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/prometheus-cpp-1.3.0-h0967b3e_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/prometheus_client-0.25.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/prompt-toolkit-3.0.52-pyha770c72_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/psutil-7.2.2-py310haea493c_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/pthread-stubs-0.4-hd74edd7_1002.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/ptyprocess-0.7.0-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pure_eval-0.2.3-pyhd8ed1ab_1.conda + - conda: 
https://conda.anaconda.org/conda-forge/osx-arm64/pyarrow-22.0.0-py310hb6292c7_2.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/pyarrow-core-22.0.0-py310h92b138f_2_cpu.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pyasn1-0.6.3-pyhcf101f3_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/pycairo-1.29.0-py310h6464c50_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pycparser-2.22-pyh29332c3_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pydantic-2.13.0-pyhcf101f3_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/pydantic-core-2.46.0-py310h9365ca8_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pydantic-extra-types-2.11.2-pyhcf101f3_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pydantic-settings-2.13.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pygments-2.20.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/pymongo-4.16.0-py310h8616463_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pyparsing-3.3.2-pyhcf101f3_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pysocks-1.7.1-pyha55dd90_7.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/python-3.10.20-h1b19095_0_cpython.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.9.0.post0-pyhe01879c_2.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/python-dotenv-1.2.2-pyhcf101f3_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/python-jose-3.5.0-pyhff2d567_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/python-multipart-0.0.26-pyhcf101f3_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/python-tzdata-2026.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/python_abi-3.10-8_cp310.conda + - conda: 
https://conda.anaconda.org/conda-forge/noarch/pytz-2026.1.post1-pyhcf101f3_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/pywavelets-1.8.0-py310hc12b6d3_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/pyyaml-6.0.3-py310hb46c203_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/qhull-2020.2-h420ef59_5.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/rav1e-0.8.1-h8246384_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/re2-2025.11.05-h64b956e_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/readline-8.3-h46df422_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/referencing-0.37.0-pyhcf101f3_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/reportlab-4.4.10-pyhcf101f3_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/rich-15.0.0-pyhcf101f3_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/rich-toolkit-0.19.7-pyhcf101f3_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/rlpycairo-0.4.0-pyh6c17108_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/rpds-py-0.30.0-py310hf3301a5_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/rsa-4.9.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/scikit-image-0.25.2-py310h25f4b65_2.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/scipy-1.15.2-py310h32ab4ed_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/sentinels-1.0.0-py_1.tar.bz2 + - conda: https://conda.anaconda.org/conda-forge/noarch/setuptools-82.0.1-pyh332efcf_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/setuptools-scm-9.2.2-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/shellingham-1.5.4-pyhd8ed1ab_2.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/six-1.17.0-pyhe01879c_1.conda + - conda: 
https://conda.anaconda.org/conda-forge/noarch/slicerator-1.1.0-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/snappy-1.2.2-hada39a4_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/sniffio-1.3.1-pyhd8ed1ab_2.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/sortedcontainers-2.4.0-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/sparse-0.17.0-pyhcf101f3_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/sqlalchemy-2.0.49-py310haea493c_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/stack_data-0.6.3-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/starlette-1.0.0-pyhcf101f3_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/suitcase-mongo-0.7.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/svt-av1-4.0.1-h0cb729a_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/tblib-3.2.2-pyhcf101f3_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/tifffile-2025.5.10-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/tiled-0.1.0a117-hd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/tiled-base-0.1.0a117-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/tiled-client-0.1.0a117-hd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/tiled-formats-0.1.0a117-hd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/tiled-server-0.1.0a117-hd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/tk-8.6.13-h010d191_3.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/tomli-2.4.1-pyhcf101f3_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/toolz-1.1.0-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/tornado-6.5.5-py310h72544b6_0.conda + - conda: 
https://conda.anaconda.org/conda-forge/noarch/tqdm-4.67.3-pyh8f84b5b_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/traitlets-5.14.3-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/typer-0.24.1-pyhcf101f3_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.15.0-h396c80c_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/typing-inspection-0.4.2-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.15.0-pyhcf101f3_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/tzdata-2025c-hc9c84f9_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/tzlocal-5.3.1-pyh8f84b5b_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/uncertainties-3.2.3-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/unicodedata2-17.0.1-py310h72544b6_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/urllib3-2.5.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/uvicorn-0.44.0-pyhc90fa1f_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/uvicorn-standard-0.44.0-h4457471_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/uvloop-0.22.1-py310hfe3a0ae_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/watchfiles-1.1.1-py310h53169e7_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/watchgod-0.7-pyhd8ed1ab_0.tar.bz2 + - conda: https://conda.anaconda.org/conda-forge/noarch/wcwidth-0.6.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/websockets-16.0-py310haea493c_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/wheel-0.46.3-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/xarray-2025.6.1-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/xorg-libxau-1.0.12-hc919400_1.conda + - conda: 
https://conda.anaconda.org/conda-forge/osx-arm64/xorg-libxdmcp-1.1.5-hc919400_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/xyzservices-2026.3.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/yaml-0.2.5-h925e9cb_3.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/zfp-1.0.1-ha86207d_5.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/zict-3.0.0-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/zipp-3.23.1-pyhcf101f3_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/zlib-1.3.2-h8088a28_2.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/zlib-ng-2.2.5-h3470cca_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/zstandard-0.25.0-py310hf151d32_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/zstd-1.5.7-hbf9d68e_6.conda + - pypi: git+https://github.com/NSLS2/chxtools?rev=main#04a3a27ceefa2436839e00f980be3f65aa4155e5 + - pypi: git+https://github.com/NSLS-II-CHX/eiger-io?rev=master#cb13bdc336e445697e6483556116aaba0368a5d3 + - pypi: https://files.pythonhosted.org/packages/c0/d1/5ddb92494517fa5453c46872a87fd0d0e4a497543b353ce0aad6466f406a/fabio-2025.10.0-cp310-cp310-macosx_11_0_arm64.whl + - pypi: https://files.pythonhosted.org/packages/9c/13/15017f6210bfea843316d62f0f121e364e17bb129444ed803a256a213036/hdf5plugin-6.0.0-py3-none-macosx_10_13_universal2.whl + - pypi: https://files.pythonhosted.org/packages/c6/b9/93d71026bf6c4dfe3afc32064a3fcd533d9032c8b97499744a999f97c230/lxml-6.0.4-cp310-cp310-macosx_10_9_universal2.whl + - pypi: git+https://github.com/ChrisBeaumont/mpl-modest-image?rev=master#4174514a9ce7f4160fb6cbd200df6897694e0ac3 + - pypi: https://files.pythonhosted.org/packages/f3/89/6b07977baf2af75fb6692f9e7a1fb612a15f600fc921f3f565366de01f4a/numexpr-2.14.1-cp310-cp310-macosx_11_0_arm64.whl + - pypi: 
https://files.pythonhosted.org/packages/82/83/7dafb09fbc3efe9d00c4667d22b32b53d08e8a676fa164c6dd8f5debe85e/pyepics-3.5.9-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/56/22/9e726b9537ec8ebe9981dc3dce07d4e72b059ad0546bc19ca16771ee23a5/pyfai-2026.3.0-cp310-cp310-macosx_11_0_arm64.whl + - pypi: https://files.pythonhosted.org/packages/11/64/42ec1b0bd72d87f87bde6ceb6869f444d91a2d601f2e67cd05febc0346a1/PyQt5-5.15.11-cp38-abi3-macosx_11_0_arm64.whl + - pypi: https://files.pythonhosted.org/packages/24/8e/76366484d9f9dbe28e3bdfc688183433a7b82e314216e9b14c89e5fab690/pyqt5_qt5-5.15.18-py3-none-macosx_11_0_arm64.whl + - pypi: https://files.pythonhosted.org/packages/2c/8e/9db5c0756134a6501ecf7d472a92f64d2d0b8033b4597e73f138a4cfb605/pyqt5_sip-12.18.0-cp310-cp310-macosx_10_9_universal2.whl + - pypi: git+https://github.com/scikit-beam/scikit-beam?rev=main#dbe344435f6b12749104b868f7e251624acee565 + - pypi: https://files.pythonhosted.org/packages/5f/bc/6a9aaa3e605cfe7e73856c9067ba42cd6289660eb7a6d31970c7f9cca725/silx-2.2.2-cp310-cp310-macosx_10_9_universal2.whl + - pypi: git+https://github.com/Nikea/xray-vision?rev=master#fc01c7bf7ca25fc2e824ed6f16481b4fa78a79a6 + dev: + channels: + - url: https://conda.anaconda.org/conda-forge/ + indexes: + - https://pypi.org/simple + packages: + linux-64: + - conda: https://conda.anaconda.org/conda-forge/linux-64/_openmp_mutex-4.5-20_gnu.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/_python_abi3_support-1.0-hd8ed1ab_2.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/_x86_64-microarch-level-1-3_x86_64.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/adbc-driver-manager-1.11.0-py310hea6c23e_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/adbc-driver-postgresql-1.8.0-pyha770c72_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/adbc-driver-sqlite-1.11.0-pyha770c72_0.conda + - conda: 
https://conda.anaconda.org/conda-forge/noarch/aiofiles-25.1.0-pyhcf101f3_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/aiosqlite-0.22.1-pyhcf101f3_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/alabaster-1.0.0-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/alembic-1.18.4-pyhcf101f3_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/alsa-lib-1.2.15.3-hb03c661_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/annotated-doc-0.0.4-pyhcf101f3_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/annotated-types-0.7.0-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/anyio-4.13.0-pyhcf101f3_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/aom-3.9.1-hac33072_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/appdirs-1.4.4-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/area-detector-handlers-0.0.10-pyhd8ed1ab_0.tar.bz2 + - conda: https://conda.anaconda.org/conda-forge/noarch/argon2-cffi-25.1.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/argon2-cffi-bindings-25.1.0-py310h7c4b9e2_2.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/asciitree-0.3.3-py_2.tar.bz2 + - conda: https://conda.anaconda.org/conda-forge/noarch/asgi-correlation-id-4.3.3-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/asteval-1.0.8-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/astropy-6.1.7-py310hf462985_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/astropy-base-6.1.7-he5c6ecd_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/astropy-iers-data-0.2026.4.13.0.58.2-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/asttokens-3.0.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/async-timeout-5.0.1-pyhcf101f3_2.conda + - conda: 
https://conda.anaconda.org/conda-forge/linux-64/asyncpg-0.31.0-py310h139afa4_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/attrs-26.1.0-pyhcf101f3_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/awkward-2.9.0-pyhcf101f3_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/awkward-cpp-52-py310h7f712d7_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-c-auth-0.9.1-h194c533_5.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-c-cal-0.9.8-h346e085_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-c-common-0.12.5-hb03c661_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-c-compression-0.3.1-h7e655bb_8.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-c-event-stream-0.5.6-h1deb5b9_4.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-c-http-0.10.7-had4b759_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-c-io-0.23.2-hbff472d_2.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-c-mqtt-0.13.3-h8ba2272_8.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-c-s3-0.8.6-h493c25d_7.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-c-sdkutils-0.2.4-h7e655bb_3.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-checksums-0.2.7-h7e655bb_4.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-crt-cpp-0.35.0-h719b17a_2.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/aws-sdk-cpp-1.11.606-h522d481_6.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/azure-core-cpp-1.16.1-h3a458e0_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/azure-identity-cpp-1.13.2-h3a5f585_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/azure-storage-blobs-cpp-12.15.0-h2a74896_1.conda + - conda: 
https://conda.anaconda.org/conda-forge/linux-64/azure-storage-common-cpp-12.11.0-h3d7a050_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/azure-storage-files-datalake-cpp-12.13.0-hf38f1be_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/babel-2.18.0-pyhcf101f3_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/backports-1.0-pyhd8ed1ab_5.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/backports.tarfile-1.2.0-pyhcf101f3_2.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/black-26.3.1-pyha5154f8_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/blosc-1.21.6-he440d0b_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/bluesky-tiled-plugins-2.0.2-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/bokeh-3.9.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/boltons-25.0.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/brotli-1.1.0-hb03c661_4.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/brotli-bin-1.1.0-hb03c661_4.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/brotli-python-1.1.0-py310hea6c23e_4.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/brunsli-0.1-he3183e4_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/bzip2-1.0.8-hda65f42_9.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/c-ares-1.34.6-hb03c661_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/c-blosc2-2.15.2-h3122c55_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/ca-certificates-2026.2.25-hbd8a1cb_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/cached-property-1.5.2-hd8ed1ab_1.tar.bz2 + - conda: https://conda.anaconda.org/conda-forge/noarch/cached_property-1.5.2-pyha770c72_1.tar.bz2 + - conda: https://conda.anaconda.org/conda-forge/noarch/cachetools-7.0.5-pyhd8ed1ab_0.conda + - conda: 
https://conda.anaconda.org/conda-forge/linux-64/cairo-1.18.4-h3394656_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/canonicaljson-2.0.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/certifi-2026.2.25-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/cffi-2.0.0-py310he7384ee_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/cfgv-3.5.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/charls-2.4.3-hecca717_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-3.4.7-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/click-8.3.2-pyhc90fa1f_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/cloudpickle-3.1.2-pyhcf101f3_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/cmarkgfm-2024.11.20-py310h7c4b9e2_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/codecov-2.1.13-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/colorama-0.4.6-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/contourpy-1.3.2-py310h3788b33_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/coverage-7.13.5-py310h3406613_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/cpython-3.10.20-py310hd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/cryptography-46.0.7-py310hb288b08_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/cycler-0.12.1-pyhcf101f3_2.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/cyrus-sasl-2.1.28-hd9c7081_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/cython-3.2.4-py310ha58568a_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/cytoolz-1.1.0-py310h7c4b9e2_2.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/dask-2026.3.0-pyhc364b38_0.conda + - conda: 
https://conda.anaconda.org/conda-forge/noarch/dask-core-2026.3.0-pyhc364b38_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/databroker-2.0.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/dav1d-1.2.1-hd590300_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/dbus-1.16.2-h24cb091_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/decorator-5.2.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/dill-0.4.1-pyhcf101f3_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/distlib-0.4.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/distributed-2026.3.0-pyhc364b38_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/dnspython-2.8.0-pyhcf101f3_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/doct-1.1.0-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/docutils-0.21.2-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/double-conversion-3.3.1-h5888daf_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/ecdsa-0.19.2-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/echo-0.15.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/email-validator-2.3.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/email_validator-2.3.0-hd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/entrypoints-0.4-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/et_xmlfile-2.0.0-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/event-model-1.23.1-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/exceptiongroup-1.3.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/executing-2.2.1-pyhd8ed1ab_0.conda + - conda: 
https://conda.anaconda.org/conda-forge/linux-64/fast-histogram-0.14-py310hf779ad0_4.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/fastapi-0.135.3-hbd727af_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/fastapi-cli-0.0.23-pyhcf101f3_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/fastapi-core-0.135.3-pyhcf101f3_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/fasteners-0.19-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/filelock-3.25.2-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/flake8-7.3.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/font-ttf-dejavu-sans-mono-2.37-hab24e00_0.tar.bz2 + - conda: https://conda.anaconda.org/conda-forge/noarch/font-ttf-inconsolata-3.000-h77eed37_0.tar.bz2 + - conda: https://conda.anaconda.org/conda-forge/noarch/font-ttf-source-code-pro-2.038-h77eed37_0.tar.bz2 + - conda: https://conda.anaconda.org/conda-forge/noarch/font-ttf-ubuntu-0.83-h77eed37_3.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/fontconfig-2.17.1-h27c8c51_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/fonts-conda-ecosystem-1-0.tar.bz2 + - conda: https://conda.anaconda.org/conda-forge/noarch/fonts-conda-forge-1-hc364b38_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.62.0-py310h3406613_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/freetype-2.14.3-ha770c72_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/freetype-py-2.3.0-pyhd8ed1ab_0.tar.bz2 + - conda: https://conda.anaconda.org/conda-forge/noarch/fsspec-2026.3.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/future-1.0.0-pyhd8ed1ab_2.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/geos-3.14.1-h480dda7_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/gflags-2.2.2-h5888daf_1005.conda + - conda: 
https://conda.anaconda.org/conda-forge/linux-64/giflib-5.2.2-hd590300_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/glog-0.7.1-hbabe93e_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/glue-core-1.25.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/gmp-6.3.0-hac33072_2.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/gmpy2-2.3.0-py310h63ebcad_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/graphite2-1.3.14-hecca717_2.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/greenlet-3.4.0-py310h25320af_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/h11-0.16.0-pyhcf101f3_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/h2-4.3.0-pyhcf101f3_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/h5netcdf-1.8.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/h5py-3.16.0-nompi_py310h4aa865e_102.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/harfbuzz-12.2.0-h15599e2_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/hdf5-1.14.6-nompi_h19486de_106.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/hdf5plugin-5.1.0-py310h887a449_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/historydict-1.2.6-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/hpack-4.1.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/httpcore-1.0.9-pyh29332c3_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/httptools-0.7.1-py310h7c4b9e2_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/httpx-0.28.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/humanize-4.15.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/hyperframe-6.1.0-pyhd8ed1ab_0.conda + - conda: 
https://conda.anaconda.org/conda-forge/linux-64/icu-75.1-he02047a_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/id-1.6.1-pyhcf101f3_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/identify-2.6.18-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/idna-3.11-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/imagecodecs-2024.12.30-py310h78a9a29_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/imageio-2.37.0-pyhfb79c49_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/imagesize-2.0.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-8.8.0-pyhcf101f3_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/importlib-resources-7.1.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/importlib_metadata-8.8.0-h8f7a5dd_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/importlib_resources-7.1.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/iniconfig-2.3.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/ipython-8.37.0-pyh8f84b5b_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/isort-8.0.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/jaraco.classes-3.4.0-pyhcf101f3_3.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/jaraco.context-6.1.1-pyhcf101f3_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/jaraco.functools-4.4.0-pyhcf101f3_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/jedi-0.19.2-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/jeepney-0.9.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/jinja2-3.1.6-pyhcf101f3_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/jmespath-1.1.0-pyhcf101f3_1.conda + - conda: 
https://conda.anaconda.org/conda-forge/noarch/json-merge-patch-0.2-pyhd8ed1ab_2.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/jsonpatch-1.33-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/jsonpointer-3.1.1-pyhcf101f3_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/jsonschema-4.26.0-pyhcf101f3_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/jsonschema-specifications-2025.9.1-pyhcf101f3_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/jupyter_core-5.9.1-pyhc90fa1f_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/jxrlib-1.1-hd590300_3.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/keyring-25.7.0-pyha804496_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/keyutils-1.6.3-hb9d3cd8_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/kiwisolver-1.5.0-py310haaf941d_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/krb5-1.21.3-h659f571_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/lazy-loader-0.5-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/lcms2-2.17-h717163a_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/ld_impl_linux-64-2.45.1-default_hbd61a6d_102.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/lerc-4.1.0-hdb68285_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libabseil-20250512.1-cxx17_hba17884_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libadbc-driver-postgresql-1.8.0-h6eab0cb_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libadbc-driver-sqlite-1.11.0-hcea63bf_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libaec-1.1.5-h088129d_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libarrow-22.0.0-h91d8edf_1_cpu.conda + - conda: 
https://conda.anaconda.org/conda-forge/linux-64/libarrow-acero-22.0.0-h635bf11_1_cpu.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libarrow-compute-22.0.0-h8c2c5c3_1_cpu.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libarrow-dataset-22.0.0-h635bf11_1_cpu.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libarrow-substrait-22.0.0-h3f74fd7_1_cpu.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libavif16-1.4.1-hcfa2d63_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libblas-3.11.0-6_h4a7cf45_openblas.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libbrotlicommon-1.1.0-hb03c661_4.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libbrotlidec-1.1.0-hb03c661_4.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libbrotlienc-1.1.0-hb03c661_4.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libcblas-3.11.0-6_h0358290_openblas.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libclang-cpp21.1-21.1.8-default_h99862b1_3.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libclang13-22.1.3-default_h746c552_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libcrc32c-1.1.2-h9c3ff4c_0.tar.bz2 + - conda: https://conda.anaconda.org/conda-forge/linux-64/libcups-2.3.3-hb8b1518_5.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libcurl-8.18.0-h4e3cde8_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libdeflate-1.23-h86f0d12_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libdrm-2.4.125-hb03c661_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libedit-3.1.20250104-pl5321h7949ede_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libegl-1.7.0-ha4b6fd6_2.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libev-4.33-hd590300_2.conda + - conda: 
https://conda.anaconda.org/conda-forge/linux-64/libevent-2.1.12-hf998b51_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libexpat-2.7.5-hecca717_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libffi-3.5.2-h3435931_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libfreetype-2.14.3-ha770c72_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libfreetype6-2.14.3-h73754d4_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libgcc-15.2.0-he0feb66_18.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-15.2.0-h69a702a_18.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libgfortran-15.2.0-h69a702a_18.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libgfortran5-15.2.0-h68bc16d_18.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libgl-1.7.0-ha4b6fd6_2.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libglib-2.86.2-h32235b2_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libglvnd-1.7.0-ha4b6fd6_2.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libglx-1.7.0-ha4b6fd6_2.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libgomp-15.2.0-he0feb66_18.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libgoogle-cloud-2.39.0-hdb79228_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libgoogle-cloud-storage-2.39.0-hdbdcf42_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libgrpc-1.73.1-h3288cfb_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libhwy-1.3.0-h4c17acf_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libiconv-1.18-h3b78370_2.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libjpeg-turbo-3.1.4.1-hb03c661_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libjxl-0.11.1-h6cb5226_4.conda + - conda: 
https://conda.anaconda.org/conda-forge/linux-64/liblapack-3.11.0-6_h47877c9_openblas.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libllvm21-21.1.8-hf7376ad_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libllvm22-22.1.3-hf7376ad_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/liblzma-5.8.3-hb03c661_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libnghttp2-1.68.1-h877daf1_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libnsl-2.0.1-hb9d3cd8_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libntlm-1.8-hb9d3cd8_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libopenblas-0.3.32-pthreads_h94d23a6_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libopengl-1.7.0-ha4b6fd6_2.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libopentelemetry-cpp-1.21.0-hb9b0907_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libopentelemetry-cpp-headers-1.21.0-ha770c72_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libparquet-22.0.0-h7376487_1_cpu.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libpciaccess-0.18-hb9d3cd8_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libpng-1.6.57-h421ea60_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libpq-17.7-h5c52fec_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libprotobuf-6.31.1-h49aed37_4.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libre2-11-2025.11.05-h7b12aa8_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.53.0-h0c1763c_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libssh2-1.11.1-hcf80075_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-15.2.0-h934c35e_18.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-15.2.0-hdf11a46_18.conda + - conda: 
https://conda.anaconda.org/conda-forge/linux-64/libthrift-0.22.0-h454ac66_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libtiff-4.7.0-hd9ff511_4.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libutf8proc-2.11.3-hfe17d71_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libuuid-2.42-h5347b49_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libuv-1.51.0-hb03c661_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libvulkan-loader-1.4.341.0-h5279c79_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libwebp-base-1.6.0-hd42ef1d_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libxcb-1.17.0-h8a09558_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libxcrypt-4.4.36-hd590300_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libxkbcommon-1.13.1-hca5e8e5_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libxml2-16-2.15.1-ha9997c6_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libxml2-2.15.1-h26afc86_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libxslt-1.1.43-h711ed8c_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libzlib-1.3.2-h25fd6f3_2.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libzopfli-1.0.3-h9c3ff4c_0.tar.bz2 + - conda: https://conda.anaconda.org/conda-forge/linux-64/llvmlite-0.47.0-py310hee1c697_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/lmfit-1.3.4-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/locket-1.0.0-pyhd8ed1ab_0.tar.bz2 + - conda: https://conda.anaconda.org/conda-forge/linux-64/lz4-4.4.5-py310hde1b0b5_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/lz4-c-1.10.0-h5888daf_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/mako-1.3.10-pyhcf101f3_1.conda + - conda: 
https://conda.anaconda.org/conda-forge/noarch/markdown-it-py-4.0.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/markupsafe-3.0.3-py310h3406613_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/matplotlib-3.10.8-py310hff52083_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/matplotlib-base-3.10.8-py310hfde16b3_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/matplotlib-inline-0.2.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/mccabe-0.7.0-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/mdurl-0.1.2-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/minio-7.2.20-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/mongomock-4.3.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/mongoquery-1.4.3-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/more-itertools-11.0.2-pyhcf101f3_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/mpc-1.4.0-he0a73b1_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/mpfr-4.2.2-he0a73b1_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/mpl-scatter-density-0.8-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/msgpack-python-1.1.2-py310h03d9f68_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/munkres-1.1.4-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/mypy_extensions-1.1.0-pyha770c72_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/narwhals-2.19.0-pyhcf101f3_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/nbformat-5.10.4-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/nbstripout-0.9.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/ncurses-6.5-h2d0b736_3.conda + - conda: 
https://conda.anaconda.org/conda-forge/linux-64/ndindex-1.10.1-py310hea6c23e_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/networkx-3.4.2-pyh267e887_2.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/nh3-0.3.4-py310h6de7dc8_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/nlohmann_json-3.12.0-h54a6638_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/nodeenv-1.10.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/nomkl-1.0-h5ca1d4c_0.tar.bz2 + - conda: https://conda.anaconda.org/conda-forge/linux-64/numba-0.65.0-py310h225f558_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/numcodecs-0.13.1-py310h5eaa309_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/numexpr-2.14.1-py310h34a7263_101.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/numpy-2.2.6-py310hefbff90_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/numpydoc-1.10.0-pyhcf101f3_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/obstore-0.9.2-py310hdfeec95_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/openjpeg-2.5.3-h55fea9a_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/openldap-2.6.10-he970967_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/openpyxl-3.1.5-py310h05b0c27_3.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/openssl-3.6.2-h35e630c_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/orc-2.2.1-hd747db4_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/orjson-3.11.8-py310hfe99b16_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/packaging-26.0-pyhcf101f3_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/pandas-2.3.3-py310h0158d43_2.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/parso-0.8.6-pyhcf101f3_0.conda + - conda: 
https://conda.anaconda.org/conda-forge/noarch/partd-1.4.2-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pathspec-1.0.4-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/pcre2-10.46-h1321c63_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pexpect-4.9.0-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pickleshare-0.7.5-pyhd8ed1ab_1004.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/pillow-11.3.0-py310h6557065_3.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pims-0.7-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pip-26.0.1-pyh8b19718_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/pixman-0.46.4-h54a6638_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/platformdirs-4.9.6-pyhcf101f3_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pluggy-1.6.0-pyhf9edf01_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pre-commit-4.5.1-pyha770c72_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pre-commit-hooks-5.0.0-pyhd8ed1ab_2.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/prettytable-3.17.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/prometheus-cpp-1.3.0-ha5d0236_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/prometheus_client-0.25.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/prompt-toolkit-3.0.52-pyha770c72_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/psutil-7.2.2-py310h139afa4_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/pthread-stubs-0.4-hb9d3cd8_1002.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/ptyprocess-0.7.0-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pure_eval-0.2.3-pyhd8ed1ab_1.conda + - conda: 
https://conda.anaconda.org/conda-forge/noarch/py-cpuinfo-9.0.0-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/pyarrow-22.0.0-py310hff52083_2.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/pyarrow-core-22.0.0-py310h923f568_2_cpu.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pyasn1-0.6.3-pyhcf101f3_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/pycairo-1.29.0-py310h8c3e0f7_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pycodestyle-2.14.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pycparser-2.22-pyh29332c3_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/pycryptodome-3.23.0-py310he45356f_2.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pydantic-2.13.0-pyhcf101f3_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/pydantic-core-2.46.0-py310hd8f68c5_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pydantic-extra-types-2.11.2-pyhcf101f3_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pydantic-settings-2.13.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/pyerfa-2.0.1.5-py310h32771cd_2.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pyflakes-3.4.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pygments-2.20.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/pymongo-4.16.0-py310hea6c23e_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pyparsing-3.3.2-pyhcf101f3_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/pyside6-6.9.2-py310h2007e60_2.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pysocks-1.7.1-pyha55dd90_7.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pytest-9.0.3-pyhc364b38_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/python-3.10.20-h3c07f61_0_cpython.conda + - 
conda: https://conda.anaconda.org/conda-forge/linux-64/python-blosc2-2.7.1-py310h8713f2e_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.9.0.post0-pyhe01879c_2.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/python-discovery-1.2.2-pyhcf101f3_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/python-dotenv-1.2.2-pyhcf101f3_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/python-duckdb-1.3.2-py310hea6c23e_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/python-fastjsonschema-2.21.2-pyhe01879c_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/python-gil-3.10.20-hd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/python-jose-3.5.0-pyhff2d567_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/python-multipart-0.0.26-pyhcf101f3_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/python-tzdata-2026.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/python_abi-3.10-8_cp310.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/pytokens-0.4.1-py310h139afa4_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pytz-2026.1.post1-pyhcf101f3_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/pywavelets-1.8.0-py310hf462985_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/pyyaml-6.0.3-py310h3406613_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/qhull-2020.2-h434a139_5.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/qt6-main-6.9.2-h994258b_2.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/rav1e-0.8.1-h1fbca29_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/re2-2025.11.05-h5301d42_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/readline-8.3-h853b02a_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/readme_renderer-44.0-pyhd8ed1ab_1.conda 
+ - conda: https://conda.anaconda.org/conda-forge/noarch/redis-py-7.4.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/referencing-0.37.0-pyhcf101f3_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/reportlab-4.4.10-pyhcf101f3_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/requests-2.33.1-pyhcf101f3_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/requests-toolbelt-1.0.0-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/rfc3986-2.0.0-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/rich-15.0.0-pyhcf101f3_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/rich-toolkit-0.19.7-pyhcf101f3_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/rlpycairo-0.4.0-pyh6c17108_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/rpds-py-0.30.0-py310hd8f68c5_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/rsa-4.9.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/ruamel.yaml-0.19.1-pyhcf101f3_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/ruamel.yaml.clib-0.2.15-py310h139afa4_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/s2n-1.6.0-h8399546_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/scikit-image-0.25.2-py310h0158d43_2.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/scipy-1.15.2-py310h1d65ade_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/secretstorage-3.4.1-py310hff52083_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/sentinels-1.0.0-py_1.tar.bz2 + - conda: https://conda.anaconda.org/conda-forge/noarch/setuptools-82.0.1-pyh332efcf_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/setuptools-scm-9.2.2-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/shapely-2.1.2-py310hc8bbb35_2.conda + - conda: 
https://conda.anaconda.org/conda-forge/noarch/shellingham-1.5.4-pyhd8ed1ab_2.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/six-1.17.0-pyhe01879c_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/slicerator-1.1.0-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/snappy-1.2.2-h03e3b7b_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/sniffio-1.3.1-pyhd8ed1ab_2.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/snowballstemmer-3.0.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/sortedcontainers-2.4.0-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/sparse-0.17.0-pyhcf101f3_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/sphinx-8.1.3-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/sphinx-copybutton-0.5.2-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/sphinx_rtd_theme-3.1.0-pyha770c72_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-applehelp-2.0.0-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-devhelp-2.0.0-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-htmlhelp-2.1.0-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-jquery-4.1-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-jsmath-1.0.1-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-qthelp-2.0.0-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-serializinghtml-1.1.10-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/sqlalchemy-2.0.49-py310h139afa4_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/stack_data-0.6.3-pyhd8ed1ab_1.conda + - conda: 
https://conda.anaconda.org/conda-forge/noarch/stamina-26.1.0-pyhcf101f3_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/starlette-1.0.0-pyhcf101f3_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/suitcase-mongo-0.7.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/svt-av1-4.0.1-hecca717_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/tblib-3.2.2-pyhcf101f3_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/tenacity-9.1.4-pyhcf101f3_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/tifffile-2025.5.10-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/tiled-0.2.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/tiled-base-0.2.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/tiled-client-0.2.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/tiled-formats-0.2.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/tiled-server-0.2.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/tk-8.6.13-noxft_h366c992_103.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/tomli-2.4.1-pyhcf101f3_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/toolz-1.1.0-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/tornado-6.5.5-py310h7c4b9e2_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/tqdm-4.67.3-pyh8f84b5b_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/traitlets-5.14.3-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/twine-6.2.0-pyhcf101f3_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/typer-0.24.1-pyhcf101f3_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.15.0-h396c80c_0.conda + - conda: 
https://conda.anaconda.org/conda-forge/noarch/typing-inspection-0.4.2-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.15.0-pyhcf101f3_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/tzdata-2025c-hc9c84f9_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/tzlocal-5.3.1-pyh8f84b5b_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/ukkonen-1.1.0-py310h03d9f68_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/uncertainties-3.2.3-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/unicodedata2-17.0.1-py310h7c4b9e2_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/urllib3-2.5.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/uvicorn-0.44.0-pyhc90fa1f_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/uvicorn-standard-0.44.0-h4457471_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/uvloop-0.22.1-py310h7c4b9e2_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/virtualenv-21.2.3-pyhcf101f3_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/watchfiles-1.1.1-py310hdfeec95_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/wayland-1.25.0-hd6090a7_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/wcwidth-0.6.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/websockets-16.0-py310h139afa4_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/wheel-0.46.3-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/xarray-2025.6.1-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/xcb-util-0.4.1-h4f16b4b_2.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/xcb-util-cursor-0.1.6-hb03c661_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/xcb-util-image-0.4.0-hb711507_2.conda + - conda: 
https://conda.anaconda.org/conda-forge/linux-64/xcb-util-keysyms-0.4.1-hb711507_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/xcb-util-renderutil-0.3.10-hb711507_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/xcb-util-wm-0.4.2-hb711507_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/xkeyboard-config-2.47-hb03c661_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/xlrd-2.0.2-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-libice-1.1.2-hb9d3cd8_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-libsm-1.2.6-he73a12e_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-libx11-1.8.13-he1eb515_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxau-1.0.12-hb03c661_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxcomposite-0.4.7-hb03c661_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxcursor-1.2.3-hb9d3cd8_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxdamage-1.1.6-hb9d3cd8_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxdmcp-1.1.5-hb03c661_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxext-1.3.7-hb03c661_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxfixes-6.0.2-hb03c661_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxi-1.8.2-hb9d3cd8_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxrandr-1.5.5-hb03c661_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxrender-0.9.12-hb9d3cd8_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxtst-1.2.5-hb9d3cd8_3.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxxf86vm-1.1.7-hb03c661_0.conda + - conda: 
https://conda.anaconda.org/conda-forge/noarch/xyzservices-2026.3.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/yaml-0.2.5-h280c20c_3.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/zarr-2.18.3-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/zfp-1.0.1-h909a3a2_5.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/zict-3.0.0-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/zipp-3.23.1-pyhcf101f3_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/zlib-1.3.2-h25fd6f3_2.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/zlib-ng-2.2.5-hde8ca8f_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/zstandard-0.25.0-py310h139afa4_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/zstd-1.5.7-hb78ec9c_6.conda + - pypi: git+https://github.com/NSLS2/chxtools?rev=main#04a3a27ceefa2436839e00f980be3f65aa4155e5 + - pypi: git+https://github.com/NSLS-II-CHX/eiger-io?rev=master#cb13bdc336e445697e6483556116aaba0368a5d3 + - pypi: https://files.pythonhosted.org/packages/fd/15/a70400eeea394dbcf34f926713499c3f7334b2ee0ec3639b794491ffc9c0/fabio-2025.10.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/ef/4a/ac0f195f52fae450338cae90234588a2ead2337440b4e5ff7230775477a3/lxml-6.0.4-cp310-cp310-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl + - pypi: git+https://github.com/ChrisBeaumont/mpl-modest-image?rev=master#4174514a9ce7f4160fb6cbd200df6897694e0ac3 + - pypi: https://files.pythonhosted.org/packages/82/83/7dafb09fbc3efe9d00c4667d22b32b53d08e8a676fa164c6dd8f5debe85e/pyepics-3.5.9-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/a0/e3/913ca30973886fa9ef5f1a78cf6a5a206ee76d8da2033ad67c4b8beec6b2/pyfai-2026.3.0-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl + - pypi: 
https://files.pythonhosted.org/packages/b4/8c/4065950f9d013c4b2e588fe33cf04e564c2322842d84dbcbce5ba1dc28b0/PyQt5-5.15.11-cp38-abi3-manylinux_2_17_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/9a/46/ffe177f99f897a59dc237a20059020427bd2d3853d713992b8081933ddfe/pyqt5_qt5-5.15.18-py3-none-manylinux2014_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/6c/4a/c66dfd090d93ef6f3e30093b55bf786c24608258d157cb1951e28c5a2725/pyqt5_sip-12.18.0-cp310-cp310-manylinux1_x86_64.manylinux_2_5_x86_64.whl + - pypi: git+https://github.com/scikit-beam/scikit-beam?rev=main#dbe344435f6b12749104b868f7e251624acee565 + - pypi: https://files.pythonhosted.org/packages/65/8a/2b46cb76762468deea3dbcc5370c858d60e5b7bdaf09bdccd0169707147c/silx-2.2.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl + - pypi: git+https://github.com/Nikea/xray-vision?rev=master#fc01c7bf7ca25fc2e824ed6f16481b4fa78a79a6 + osx-arm64: + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/_openmp_mutex-4.5-7_kmp_llvm.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/_python_abi3_support-1.0-hd8ed1ab_2.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/aiofiles-25.1.0-pyhcf101f3_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/alabaster-1.0.0-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/alembic-1.18.4-pyhcf101f3_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/annotated-doc-0.0.4-pyhcf101f3_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/annotated-types-0.7.0-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/anyio-4.13.0-pyhcf101f3_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/aom-3.9.1-h7bae524_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/appdirs-1.4.4-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/area-detector-handlers-0.0.10-pyhd8ed1ab_0.tar.bz2 + - conda: 
https://conda.anaconda.org/conda-forge/noarch/asgi-correlation-id-4.3.3-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/asteval-1.0.8-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/asttokens-3.0.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/attrs-26.1.0-pyhcf101f3_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/aws-c-auth-0.9.1-h753d554_5.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/aws-c-cal-0.9.8-hca30140_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/aws-c-common-0.12.5-hc919400_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/aws-c-compression-0.3.1-h61d5560_8.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/aws-c-event-stream-0.5.6-hada8b3e_4.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/aws-c-http-0.10.7-h241dc44_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/aws-c-io-0.23.2-hcea795d_2.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/aws-c-mqtt-0.13.3-hf26a141_8.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/aws-c-s3-0.8.6-h1e3b5a0_7.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/aws-c-sdkutils-0.2.4-h61d5560_3.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/aws-checksums-0.2.7-h61d5560_4.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/aws-crt-cpp-0.35.0-h44e95eb_2.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/aws-sdk-cpp-1.11.606-hf3ce6b4_6.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/azure-core-cpp-1.16.1-h88fedcc_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/azure-identity-cpp-1.13.2-h853621b_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/azure-storage-blobs-cpp-12.15.0-h10d327b_1.conda + - conda: 
https://conda.anaconda.org/conda-forge/osx-arm64/azure-storage-common-cpp-12.11.0-h7e4aa5d_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/azure-storage-files-datalake-cpp-12.13.0-hb288d13_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/babel-2.18.0-pyhcf101f3_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/backports-1.0-pyhd8ed1ab_5.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/backports.tarfile-1.2.0-pyhcf101f3_2.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/black-26.3.1-pyha5154f8_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/blosc-1.21.6-h7dd00d9_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/bokeh-3.9.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/boltons-25.0.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/brotli-1.1.0-h6caf38d_4.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/brotli-bin-1.1.0-h6caf38d_4.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/brotli-python-1.1.0-py310h1af2607_4.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/brunsli-0.1-h97083b6_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/bzip2-1.0.8-hd037594_9.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/c-ares-1.34.6-hc919400_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/c-blosc2-2.19.1-h9c47b6e_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/ca-certificates-2026.2.25-hbd8a1cb_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/cached-property-1.5.2-hd8ed1ab_1.tar.bz2 + - conda: https://conda.anaconda.org/conda-forge/noarch/cached_property-1.5.2-pyha770c72_1.tar.bz2 + - conda: https://conda.anaconda.org/conda-forge/noarch/cachetools-7.0.5-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/cachey-0.2.1-pyh9f0ad1d_0.tar.bz2 + - conda: 
https://conda.anaconda.org/conda-forge/osx-arm64/cairo-1.18.4-he0f2337_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/certifi-2026.2.25-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/cffi-2.0.0-py310hf5b66c1_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/cfgv-3.5.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/charls-2.4.3-hf6b4638_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-3.4.7-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/click-8.3.2-pyhc90fa1f_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/cloudpickle-3.1.2-pyhcf101f3_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/cmarkgfm-2024.11.20-py310h7bdd564_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/codecov-2.1.13-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/colorama-0.4.6-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/contourpy-1.3.2-py310h7f4e7e6_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/coverage-7.13.5-py310hb46c203_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/cpython-3.10.20-py310hd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/cryptography-46.0.7-py310h12cab78_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/cycler-0.12.1-pyhcf101f3_2.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/cython-3.2.4-py310h9a762d2_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/cytoolz-1.1.0-py310h72544b6_2.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/dask-2026.3.0-pyhc364b38_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/dask-core-2026.3.0-pyhc364b38_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/databroker-2.0.0b38-pyhd8ed1ab_0.conda + - conda: 
https://conda.anaconda.org/conda-forge/osx-arm64/dav1d-1.2.1-hb547adb_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/decorator-5.2.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/dill-0.4.1-pyhcf101f3_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/distlib-0.4.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/distributed-2026.3.0-pyhc364b38_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/dnspython-2.8.0-pyhcf101f3_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/doct-1.1.0-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/docutils-0.21.2-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/ecdsa-0.19.2-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/email-validator-2.3.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/email_validator-2.3.0-hd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/entrypoints-0.4-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/et_xmlfile-2.0.0-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/event-model-1.23.1-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/exceptiongroup-1.3.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/executing-2.2.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/fastapi-0.135.3-hbd727af_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/fastapi-cli-0.0.23-pyhcf101f3_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/fastapi-core-0.135.3-pyhcf101f3_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/filelock-3.25.2-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/flake8-7.3.0-pyhd8ed1ab_0.conda + - conda: 
https://conda.anaconda.org/conda-forge/noarch/font-ttf-dejavu-sans-mono-2.37-hab24e00_0.tar.bz2 + - conda: https://conda.anaconda.org/conda-forge/noarch/font-ttf-inconsolata-3.000-h77eed37_0.tar.bz2 + - conda: https://conda.anaconda.org/conda-forge/noarch/font-ttf-source-code-pro-2.038-h77eed37_0.tar.bz2 + - conda: https://conda.anaconda.org/conda-forge/noarch/font-ttf-ubuntu-0.83-h77eed37_3.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/fontconfig-2.17.1-h2b252f5_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/fonts-conda-ecosystem-1-0.tar.bz2 + - conda: https://conda.anaconda.org/conda-forge/noarch/fonts-conda-forge-1-hc364b38_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/fonttools-4.62.0-py310hb46c203_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/freetype-2.14.3-hce30654_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/freetype-py-2.3.0-pyhd8ed1ab_0.tar.bz2 + - conda: https://conda.anaconda.org/conda-forge/noarch/fsspec-2026.3.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/future-1.0.0-pyhd8ed1ab_2.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/gflags-2.2.2-hf9b8971_1005.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/giflib-5.2.2-h93a5062_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/glog-0.7.1-heb240a5_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/gmp-6.3.0-h7bae524_2.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/gmpy2-2.3.0-py310h6ac7f53_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/greenlet-3.4.0-py310h19b6747_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/h11-0.16.0-pyhcf101f3_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/h2-4.3.0-pyhcf101f3_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/h5netcdf-1.8.1-pyhd8ed1ab_0.conda + - conda: 
https://conda.anaconda.org/conda-forge/osx-arm64/h5py-3.16.0-nompi_py310h0c5f886_102.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/hdf5-1.14.6-nompi_had3affe_108.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/heapdict-1.0.1-pyhd8ed1ab_2.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/historydict-1.2.6-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/hpack-4.1.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/httpcore-1.0.9-pyh29332c3_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/httptools-0.7.1-py310hfe3a0ae_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/httpx-0.28.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/humanize-4.15.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/hyperframe-6.1.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/icu-78.3-hef89b57_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/id-1.6.1-pyhcf101f3_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/identify-2.6.18-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/idna-3.11-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/imagecodecs-2025.3.30-py310hc9b329b_2.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/imageio-2.37.0-pyhfb79c49_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/imagesize-2.0.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-8.8.0-pyhcf101f3_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/importlib-resources-7.1.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/importlib_resources-7.1.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/iniconfig-2.3.0-pyhd8ed1ab_0.conda + - conda: 
https://conda.anaconda.org/conda-forge/noarch/ipython-8.37.0-pyh8f84b5b_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/isort-8.0.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/jaraco.classes-3.4.0-pyhcf101f3_3.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/jaraco.context-6.1.1-pyhcf101f3_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/jaraco.functools-4.4.0-pyhcf101f3_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/jedi-0.19.2-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/jinja2-3.1.6-pyhcf101f3_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/jmespath-1.1.0-pyhcf101f3_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/jsonschema-4.26.0-pyhcf101f3_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/jsonschema-specifications-2025.9.1-pyhcf101f3_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/jupyter_core-5.9.1-pyhc90fa1f_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/jxrlib-1.1-h93a5062_3.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/keyring-25.7.0-pyh534df25_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/kiwisolver-1.5.0-py310h34990b0_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/krb5-1.22.2-h385eeb1_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/lazy-loader-0.5-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/lcms2-2.18-hdfa7624_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/lerc-4.1.0-h1eee2c3_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libabseil-20250512.1-cxx17_hd41c47c_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libaec-1.1.5-h8664d51_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libarrow-22.0.0-h7239961_1_cpu.conda + - conda: 
https://conda.anaconda.org/conda-forge/osx-arm64/libarrow-acero-22.0.0-hc317990_1_cpu.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libarrow-compute-22.0.0-h75845d1_1_cpu.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libarrow-dataset-22.0.0-hc317990_1_cpu.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libarrow-substrait-22.0.0-h144af7f_1_cpu.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libavif16-1.4.1-hfce71f6_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libblas-3.11.0-6_h51639a9_openblas.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libbrotlicommon-1.1.0-h6caf38d_4.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libbrotlidec-1.1.0-h6caf38d_4.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libbrotlienc-1.1.0-h6caf38d_4.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libcblas-3.11.0-6_hb0561ab_openblas.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libcrc32c-1.1.2-hbdafb3b_0.tar.bz2 + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libcurl-8.19.0-hd5a2499_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libcxx-22.1.3-h55c6f16_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libdeflate-1.24-h5773f1b_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libedit-3.1.20250104-pl5321hafb1f1b_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libev-4.33-h93a5062_2.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libevent-2.1.12-h2757513_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libexpat-2.7.5-hf6b4638_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libffi-3.5.2-hcf2aa1b_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libfreetype-2.14.3-hce30654_0.conda + - conda: 
https://conda.anaconda.org/conda-forge/osx-arm64/libfreetype6-2.14.3-hdfa99f5_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libgcc-15.2.0-hcbb3090_18.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libgfortran-15.2.0-h07b0088_18.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libgfortran5-15.2.0-hdae7583_18.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libglib-2.86.4-he378b5c_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libgoogle-cloud-2.39.0-head0a95_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libgoogle-cloud-storage-2.39.0-hfa3a374_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libgrpc-1.73.1-h3063b79_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libhwy-1.3.0-h48b13b8_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libiconv-1.18-h23cfdf5_2.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libintl-0.25.1-h493aca8_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libjpeg-turbo-3.1.4.1-h84a0fba_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libjxl-0.11.1-h7274d02_4.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/liblapack-3.11.0-6_hd9741b5_openblas.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/liblzma-5.8.3-h8088a28_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libnghttp2-1.68.1-h8f3e76b_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libopenblas-0.3.32-openmp_he657e61_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libopentelemetry-cpp-1.21.0-he15edb5_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libopentelemetry-cpp-headers-1.21.0-hce30654_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libparquet-22.0.0-h0ac143b_1_cpu.conda + - conda: 
https://conda.anaconda.org/conda-forge/osx-arm64/libpng-1.6.57-h132b30e_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libprotobuf-6.31.1-h98f38fd_4.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libre2-11-2025.11.05-h91c62da_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libsqlite-3.53.0-h1b79a29_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libssh2-1.11.1-h1590b86_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libthrift-0.22.0-h14a376c_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libtiff-4.7.1-h7dc4979_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libutf8proc-2.11.3-h2431656_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libuv-1.51.0-h6caf38d_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libwebp-base-1.6.0-h07db88b_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libxcb-1.17.0-hdb1d25a_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libxml2-16-2.15.2-h5ef1a60_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libxml2-2.15.2-h8d039ee_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libzlib-1.3.2-h8088a28_2.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libzopfli-1.0.3-h9f76cd9_0.tar.bz2 + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/llvm-openmp-22.1.3-hc7d1edf_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/llvmlite-0.47.0-py310h4137262_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/lmfit-1.3.4-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/locket-1.0.0-pyhd8ed1ab_0.tar.bz2 + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/lz4-4.4.5-py310h36fcd3f_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/lz4-c-1.10.0-h286801f_1.conda + - conda: 
https://conda.anaconda.org/conda-forge/noarch/mako-1.3.10-pyhcf101f3_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/markdown-it-py-4.0.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/markupsafe-3.0.3-py310hb46c203_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/matplotlib-3.10.8-py310hb6292c7_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/matplotlib-base-3.10.8-py310h0181960_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/matplotlib-inline-0.2.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/mccabe-0.7.0-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/mdurl-0.1.2-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/mongomock-4.3.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/mongoquery-1.4.3-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/more-itertools-11.0.2-pyhcf101f3_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/mpc-1.4.0-h169892a_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/mpfr-4.2.2-h6bc93b0_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/msgpack-python-1.1.2-py310h0e897d2_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/munkres-1.1.4-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/mypy_extensions-1.1.0-pyha770c72_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/narwhals-2.19.0-pyhcf101f3_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/nbformat-5.10.4-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/nbstripout-0.9.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/ncurses-6.5-h5e97a16_3.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/ndindex-1.8-pyhd8ed1ab_1.conda + - conda: 
https://conda.anaconda.org/conda-forge/noarch/networkx-3.4.2-pyh267e887_2.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/nh3-0.3.4-py310hf32026f_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/nlohmann_json-3.12.0-h784d473_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/nodeenv-1.10.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/numba-0.65.0-py310h71bca05_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/numpy-2.2.6-py310h4d83441_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/numpydoc-1.10.0-pyhcf101f3_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/openjpeg-2.5.4-hd9e9057_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/openpyxl-3.1.5-py310hb1d31aa_3.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/openssl-3.6.2-hd24854e_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/orc-2.2.1-h4fd0076_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/orjson-3.11.8-py310h38230ac_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/packaging-26.0-pyhcf101f3_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/pandas-2.3.3-py310h25f4b65_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/parso-0.8.6-pyhcf101f3_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/partd-1.4.2-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pathspec-1.0.4-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/pcre2-10.47-h30297fc_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pexpect-4.9.0-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pickleshare-0.7.5-pyhd8ed1ab_1004.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/pillow-12.0.0-py310hcac772a_1.conda + - conda: 
https://conda.anaconda.org/conda-forge/noarch/pims-0.7-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pip-26.0.1-pyh8b19718_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/pixman-0.46.4-h81086ad_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/platformdirs-4.9.6-pyhcf101f3_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pluggy-1.6.0-pyhf9edf01_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pre-commit-4.5.1-pyha770c72_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pre-commit-hooks-5.0.0-pyhd8ed1ab_2.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/prettytable-3.17.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/prometheus-cpp-1.3.0-h0967b3e_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/prometheus_client-0.25.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/prompt-toolkit-3.0.52-pyha770c72_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/psutil-7.2.2-py310haea493c_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/pthread-stubs-0.4-hd74edd7_1002.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/ptyprocess-0.7.0-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pure_eval-0.2.3-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/pyarrow-22.0.0-py310hb6292c7_2.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/pyarrow-core-22.0.0-py310h92b138f_2_cpu.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pyasn1-0.6.3-pyhcf101f3_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/pycairo-1.29.0-py310h6464c50_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pycodestyle-2.14.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pycparser-2.22-pyh29332c3_1.conda + - conda: 
https://conda.anaconda.org/conda-forge/noarch/pydantic-2.13.0-pyhcf101f3_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/pydantic-core-2.46.0-py310h9365ca8_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pydantic-extra-types-2.11.2-pyhcf101f3_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pydantic-settings-2.13.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pyflakes-3.4.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pygments-2.20.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/pymongo-4.16.0-py310h8616463_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pyparsing-3.3.2-pyhcf101f3_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pysocks-1.7.1-pyha55dd90_7.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pytest-9.0.3-pyhc364b38_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/python-3.10.20-h1b19095_0_cpython.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.9.0.post0-pyhe01879c_2.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/python-discovery-1.2.2-pyhcf101f3_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/python-dotenv-1.2.2-pyhcf101f3_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/python-fastjsonschema-2.21.2-pyhe01879c_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/python-gil-3.10.20-hd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/python-jose-3.5.0-pyhff2d567_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/python-multipart-0.0.26-pyhcf101f3_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/python-tzdata-2026.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/python_abi-3.10-8_cp310.conda + - conda: 
https://conda.anaconda.org/conda-forge/osx-arm64/pytokens-0.4.1-py310haea493c_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/pytz-2026.1.post1-pyhcf101f3_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/pywavelets-1.8.0-py310hc12b6d3_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/pyyaml-6.0.3-py310hb46c203_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/qhull-2020.2-h420ef59_5.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/rav1e-0.8.1-h8246384_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/re2-2025.11.05-h64b956e_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/readline-8.3-h46df422_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/readme_renderer-44.0-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/referencing-0.37.0-pyhcf101f3_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/reportlab-4.4.10-pyhcf101f3_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/requests-2.33.1-pyhcf101f3_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/requests-toolbelt-1.0.0-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/rfc3986-2.0.0-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/rich-15.0.0-pyhcf101f3_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/rich-toolkit-0.19.7-pyhcf101f3_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/rlpycairo-0.4.0-pyh6c17108_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/rpds-py-0.30.0-py310hf3301a5_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/rsa-4.9.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/ruamel.yaml-0.19.1-pyhcf101f3_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/ruamel.yaml.clib-0.2.15-py310haea493c_1.conda + - conda: 
https://conda.anaconda.org/conda-forge/osx-arm64/scikit-image-0.25.2-py310h25f4b65_2.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/scipy-1.15.2-py310h32ab4ed_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/sentinels-1.0.0-py_1.tar.bz2 + - conda: https://conda.anaconda.org/conda-forge/noarch/setuptools-82.0.1-pyh332efcf_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/setuptools-scm-9.2.2-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/shellingham-1.5.4-pyhd8ed1ab_2.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/six-1.17.0-pyhe01879c_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/slicerator-1.1.0-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/snappy-1.2.2-hada39a4_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/sniffio-1.3.1-pyhd8ed1ab_2.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/snowballstemmer-3.0.1-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/sortedcontainers-2.4.0-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/sparse-0.17.0-pyhcf101f3_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/sphinx-8.1.3-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/sphinx-copybutton-0.5.2-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/sphinx_rtd_theme-3.1.0-pyha770c72_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-applehelp-2.0.0-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-devhelp-2.0.0-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-htmlhelp-2.1.0-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-jquery-4.1-pyhd8ed1ab_1.conda + - conda: 
https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-jsmath-1.0.1-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-qthelp-2.0.0-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-serializinghtml-1.1.10-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/sqlalchemy-2.0.49-py310haea493c_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/stack_data-0.6.3-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/starlette-1.0.0-pyhcf101f3_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/suitcase-mongo-0.7.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/svt-av1-4.0.1-h0cb729a_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/tblib-3.2.2-pyhcf101f3_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/tifffile-2025.5.10-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/tiled-0.1.0a117-hd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/tiled-base-0.1.0a117-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/tiled-client-0.1.0a117-hd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/tiled-formats-0.1.0a117-hd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/tiled-server-0.1.0a117-hd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/tk-8.6.13-h010d191_3.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/tomli-2.4.1-pyhcf101f3_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/toolz-1.1.0-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/tornado-6.5.5-py310h72544b6_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/tqdm-4.67.3-pyh8f84b5b_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/traitlets-5.14.3-pyhd8ed1ab_1.conda + - conda: 
https://conda.anaconda.org/conda-forge/noarch/twine-6.2.0-pyhcf101f3_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/typer-0.24.1-pyhcf101f3_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.15.0-h396c80c_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/typing-inspection-0.4.2-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.15.0-pyhcf101f3_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/tzdata-2025c-hc9c84f9_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/tzlocal-5.3.1-pyh8f84b5b_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/ukkonen-1.1.0-py310h1c35771_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/uncertainties-3.2.3-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/unicodedata2-17.0.1-py310h72544b6_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/urllib3-2.5.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/uvicorn-0.44.0-pyhc90fa1f_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/uvicorn-standard-0.44.0-h4457471_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/uvloop-0.22.1-py310hfe3a0ae_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/virtualenv-21.2.3-pyhcf101f3_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/watchfiles-1.1.1-py310h53169e7_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/watchgod-0.7-pyhd8ed1ab_0.tar.bz2 + - conda: https://conda.anaconda.org/conda-forge/noarch/wcwidth-0.6.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/websockets-16.0-py310haea493c_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/wheel-0.46.3-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/xarray-2025.6.1-pyhd8ed1ab_1.conda + - conda: 
https://conda.anaconda.org/conda-forge/osx-arm64/xorg-libxau-1.0.12-hc919400_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/xorg-libxdmcp-1.1.5-hc919400_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/xyzservices-2026.3.0-pyhd8ed1ab_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/yaml-0.2.5-h925e9cb_3.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/zfp-1.0.1-ha86207d_5.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/zict-3.0.0-pyhd8ed1ab_1.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/zipp-3.23.1-pyhcf101f3_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/zlib-1.3.2-h8088a28_2.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/zlib-ng-2.2.5-h3470cca_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/zstandard-0.25.0-py310hf151d32_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/zstd-1.5.7-hbf9d68e_6.conda + - pypi: git+https://github.com/NSLS2/chxtools?rev=main#04a3a27ceefa2436839e00f980be3f65aa4155e5 + - pypi: git+https://github.com/NSLS-II-CHX/eiger-io?rev=master#cb13bdc336e445697e6483556116aaba0368a5d3 + - pypi: https://files.pythonhosted.org/packages/c0/d1/5ddb92494517fa5453c46872a87fd0d0e4a497543b353ce0aad6466f406a/fabio-2025.10.0-cp310-cp310-macosx_11_0_arm64.whl + - pypi: https://files.pythonhosted.org/packages/9c/13/15017f6210bfea843316d62f0f121e364e17bb129444ed803a256a213036/hdf5plugin-6.0.0-py3-none-macosx_10_13_universal2.whl + - pypi: https://files.pythonhosted.org/packages/c6/b9/93d71026bf6c4dfe3afc32064a3fcd533d9032c8b97499744a999f97c230/lxml-6.0.4-cp310-cp310-macosx_10_9_universal2.whl + - pypi: git+https://github.com/ChrisBeaumont/mpl-modest-image?rev=master#4174514a9ce7f4160fb6cbd200df6897694e0ac3 + - pypi: https://files.pythonhosted.org/packages/f3/89/6b07977baf2af75fb6692f9e7a1fb612a15f600fc921f3f565366de01f4a/numexpr-2.14.1-cp310-cp310-macosx_11_0_arm64.whl + - pypi: 
https://files.pythonhosted.org/packages/82/83/7dafb09fbc3efe9d00c4667d22b32b53d08e8a676fa164c6dd8f5debe85e/pyepics-3.5.9-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/56/22/9e726b9537ec8ebe9981dc3dce07d4e72b059ad0546bc19ca16771ee23a5/pyfai-2026.3.0-cp310-cp310-macosx_11_0_arm64.whl + - pypi: https://files.pythonhosted.org/packages/11/64/42ec1b0bd72d87f87bde6ceb6869f444d91a2d601f2e67cd05febc0346a1/PyQt5-5.15.11-cp38-abi3-macosx_11_0_arm64.whl + - pypi: https://files.pythonhosted.org/packages/24/8e/76366484d9f9dbe28e3bdfc688183433a7b82e314216e9b14c89e5fab690/pyqt5_qt5-5.15.18-py3-none-macosx_11_0_arm64.whl + - pypi: https://files.pythonhosted.org/packages/2c/8e/9db5c0756134a6501ecf7d472a92f64d2d0b8033b4597e73f138a4cfb605/pyqt5_sip-12.18.0-cp310-cp310-macosx_10_9_universal2.whl + - pypi: git+https://github.com/scikit-beam/scikit-beam?rev=main#dbe344435f6b12749104b868f7e251624acee565 + - pypi: https://files.pythonhosted.org/packages/5f/bc/6a9aaa3e605cfe7e73856c9067ba42cd6289660eb7a6d31970c7f9cca725/silx-2.2.2-cp310-cp310-macosx_10_9_universal2.whl + - pypi: git+https://github.com/Nikea/xray-vision?rev=master#fc01c7bf7ca25fc2e824ed6f16481b4fa78a79a6 +packages: +- conda: https://conda.anaconda.org/conda-forge/linux-64/_openmp_mutex-4.5-20_gnu.conda + build_number: 20 + sha256: 1dd3fffd892081df9726d7eb7e0dea6198962ba775bd88842135a4ddb4deb3c9 + md5: a9f577daf3de00bca7c3c76c0ecbd1de + depends: + - __glibc >=2.17,<3.0.a0 + - libgomp >=7.5.0 + constrains: + - openmp_impl <0.0a0 + license: BSD-3-Clause + license_family: BSD + purls: [] + size: 28948 + timestamp: 1770939786096 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/_openmp_mutex-4.5-7_kmp_llvm.conda + build_number: 7 + sha256: 7acaa2e0782cad032bdaf756b536874346ac1375745fb250e9bdd6a48a7ab3cd + md5: a44032f282e7d2acdeb1c240308052dd + depends: + - llvm-openmp >=9.0.1 + license: BSD-3-Clause + license_family: BSD + purls: [] + size: 8325 + timestamp: 1764092507920 +- conda: 
https://conda.anaconda.org/conda-forge/noarch/_python_abi3_support-1.0-hd8ed1ab_2.conda + sha256: a3967b937b9abf0f2a99f3173fa4630293979bd1644709d89580e7c62a544661 + md5: aaa2a381ccc56eac91d63b6c1240312f + depends: + - cpython + - python-gil + license: MIT + license_family: MIT + purls: [] + size: 8191 + timestamp: 1744137672556 +- conda: https://conda.anaconda.org/conda-forge/noarch/_x86_64-microarch-level-1-3_x86_64.conda + build_number: 3 + sha256: 5f9029eaa78eb13a5499b7a2b012a47a18136b2d41bad99bb7b1796d1fc2b179 + md5: 225cb2e9b9512730a92f83696b8fbab8 + depends: + - __archspec 1.* x86_64 + license: BSD-3-Clause + license_family: BSD + purls: [] + size: 9818 + timestamp: 1764034326319 +- conda: https://conda.anaconda.org/conda-forge/linux-64/adbc-driver-manager-1.11.0-py310hea6c23e_0.conda + sha256: 0deae0dd9facd11c9f6952867cb44bb9039b557f3343685b2222a4cbb01e0de8 + md5: ec470c77efb659e53e2e38ba1aef8e9c + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=14 + - libstdcxx >=14 + - python >=3.10,<3.11.0a0 + - python_abi 3.10.* *_cp310 + constrains: + - pyarrow >=8.0.0 + license: Apache-2.0 + license_family: APACHE + purls: + - pkg:pypi/adbc-driver-manager?source=hash-mapping + size: 421546 + timestamp: 1775526690284 +- conda: https://conda.anaconda.org/conda-forge/noarch/adbc-driver-postgresql-1.8.0-pyha770c72_1.conda + sha256: 5f591b79a26083ee0345d1b5c604e5d811795835c47ddccaf3e80794e2f753a1 + md5: f5fb77fa4e3552c149554b9553479bb1 + depends: + - adbc-driver-manager >=1.8.0,<2.0a0 + - importlib_resources + - libadbc-driver-postgresql >=1.8.0,<1.8.1.0a0 + - python >=3.10 + constrains: + - pyarrow >=8.0.0 + license: Apache-2.0 + license_family: APACHE + purls: + - pkg:pypi/adbc-driver-postgresql?source=hash-mapping + size: 25812 + timestamp: 1758848923168 +- conda: https://conda.anaconda.org/conda-forge/noarch/adbc-driver-sqlite-1.11.0-pyha770c72_0.conda + sha256: 341adfb3be3576df484e6c6bd46f0f904545dd346a784e60d9604f59ccf40f5e + md5: e3033a19ff2de79254e1d926804b0d63 + 
depends: + - adbc-driver-manager >=1.11.0,<2.0a0 + - importlib_resources + - libadbc-driver-sqlite >=1.11.0,<1.11.1.0a0 + - python >=3.10 + constrains: + - pyarrow >=8.0.0 + license: Apache-2.0 + license_family: APACHE + purls: + - pkg:pypi/adbc-driver-sqlite?source=hash-mapping + size: 30984 + timestamp: 1775527223964 +- conda: https://conda.anaconda.org/conda-forge/noarch/aiofiles-25.1.0-pyhcf101f3_1.conda + sha256: f37288164cf28b916b184b0a5e89225e17af0e0c9bcd4d0dc39e5a597fcfeed1 + md5: a6435dc39a8031d33d6d52859913e939 + depends: + - python >=3.10 + - python + license: Apache-2.0 + license_family: APACHE + purls: + - pkg:pypi/aiofiles?source=hash-mapping + size: 24824 + timestamp: 1767290524894 +- conda: https://conda.anaconda.org/conda-forge/noarch/aiosqlite-0.22.1-pyhcf101f3_1.conda + sha256: 8b23dfe615f958724269733d348139fb7615f29e0d35a9b556fe823d65a941a8 + md5: 9be31ce1f8e613fbb3b140430e109d41 + depends: + - python >=3.10 + - python + license: MIT + license_family: MIT + purls: + - pkg:pypi/aiosqlite?source=hash-mapping + size: 22318 + timestamp: 1767730199932 +- conda: https://conda.anaconda.org/conda-forge/noarch/alabaster-1.0.0-pyhd8ed1ab_1.conda + sha256: 6c4456a138919dae9edd3ac1a74b6fbe5fd66c05675f54df2f8ab8c8d0cc6cea + md5: 1fd9696649f65fd6611fcdb4ffec738a + depends: + - python >=3.10 + license: BSD-3-Clause + license_family: BSD + purls: + - pkg:pypi/alabaster?source=hash-mapping + size: 18684 + timestamp: 1733750512696 +- conda: https://conda.anaconda.org/conda-forge/noarch/alembic-1.18.4-pyhcf101f3_0.conda + sha256: 83fc576dbcd59427f55be9623e1b101a1607ed9b4dc8633d86ada30c6ec1cf1d + md5: c45fa7cf996b766cb63eadf3c3e6408a + depends: + - python >=3.10 + - sqlalchemy >=1.4.23 + - mako + - typing_extensions >=4.12 + - tomli + - python + license: MIT + license_family: MIT + purls: + - pkg:pypi/alembic?source=hash-mapping + size: 184763 + timestamp: 1770806831769 +- conda: https://conda.anaconda.org/conda-forge/linux-64/alsa-lib-1.2.15.3-hb03c661_0.conda + 
sha256: d88aa7ae766cf584e180996e92fef2aa7d8e0a0a5ab1d4d49c32390c1b5fff31 + md5: dcdc58c15961dbf17a0621312b01f5cb + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=14 + license: LGPL-2.1-or-later + license_family: GPL + purls: [] + size: 584660 + timestamp: 1768327524772 +- conda: https://conda.anaconda.org/conda-forge/noarch/annotated-doc-0.0.4-pyhcf101f3_0.conda + sha256: cc9fbc50d4ee7ee04e49ee119243e6f1765750f0fd0b4d270d5ef35461b643b1 + md5: 52be5139047efadaeeb19c6a5103f92a + depends: + - python >=3.10 + - python + license: MIT + license_family: MIT + purls: + - pkg:pypi/annotated-doc?source=hash-mapping + size: 14222 + timestamp: 1762868213144 +- conda: https://conda.anaconda.org/conda-forge/noarch/annotated-types-0.7.0-pyhd8ed1ab_1.conda + sha256: e0ea1ba78fbb64f17062601edda82097fcf815012cf52bb704150a2668110d48 + md5: 2934f256a8acfe48f6ebb4fce6cde29c + depends: + - python >=3.9 + - typing-extensions >=4.0.0 + license: MIT + license_family: MIT + purls: + - pkg:pypi/annotated-types?source=hash-mapping + size: 18074 + timestamp: 1733247158254 +- conda: https://conda.anaconda.org/conda-forge/noarch/anyio-4.13.0-pyhcf101f3_0.conda + sha256: f09aed24661cd45ba54a43772504f05c0698248734f9ae8cd289d314ac89707e + md5: af2df4b9108808da3dc76710fe50eae2 + depends: + - exceptiongroup >=1.0.2 + - idna >=2.8 + - python >=3.10 + - typing_extensions >=4.5 + - python + constrains: + - trio >=0.32.0 + - uvloop >=0.22.1 + - winloop >=0.2.3 + license: MIT + license_family: MIT + purls: + - pkg:pypi/anyio?source=compressed-mapping + size: 146764 + timestamp: 1774359453364 +- conda: https://conda.anaconda.org/conda-forge/linux-64/aom-3.9.1-hac33072_0.conda + sha256: b08ef033817b5f9f76ce62dfcac7694e7b6b4006420372de22494503decac855 + md5: 346722a0be40f6edc53f12640d301338 + depends: + - libgcc-ng >=12 + - libstdcxx-ng >=12 + license: BSD-2-Clause + license_family: BSD + purls: [] + size: 2706396 + timestamp: 1718551242397 +- conda: 
https://conda.anaconda.org/conda-forge/osx-arm64/aom-3.9.1-h7bae524_0.conda + sha256: ec238f18ce8140485645252351a0eca9ef4f7a1c568a420f240a585229bc12ef + md5: 7adba36492a1bb22d98ffffe4f6fc6de + depends: + - __osx >=11.0 + - libcxx >=16 + license: BSD-2-Clause + license_family: BSD + purls: [] + size: 2235747 + timestamp: 1718551382432 +- conda: https://conda.anaconda.org/conda-forge/noarch/appdirs-1.4.4-pyhd8ed1ab_1.conda + sha256: 5b9ef6d338525b332e17c3ed089ca2f53a5d74b7a7b432747d29c6466e39346d + md5: f4e90937bbfc3a4a92539545a37bb448 + depends: + - python >=3.9 + license: MIT + license_family: MIT + purls: + - pkg:pypi/appdirs?source=hash-mapping + size: 14835 + timestamp: 1733754069532 +- conda: https://conda.anaconda.org/conda-forge/noarch/area-detector-handlers-0.0.10-pyhd8ed1ab_0.tar.bz2 + sha256: 75ea052a3fd16d518612e2165b7fb2b53c91226552b557755949ab301b871550 + md5: e410310445f38d0371ab90f76d27c798 + depends: + - dask + - entrypoints + - h5py + - pandas + - python >=3.6 + - tifffile >=2020.8.25 + license: BSD-3-Clause + license_family: BSD + purls: + - pkg:pypi/area-detector-handlers?source=hash-mapping + size: 21977 + timestamp: 1664603052349 +- conda: https://conda.anaconda.org/conda-forge/noarch/argon2-cffi-25.1.0-pyhd8ed1ab_0.conda + sha256: bea62005badcb98b1ae1796ec5d70ea0fc9539e7d59708ac4e7d41e2f4bb0bad + md5: 8ac12aff0860280ee0cff7fa2cf63f3b + depends: + - argon2-cffi-bindings + - python >=3.9 + - typing-extensions + constrains: + - argon2_cffi ==999 + license: MIT + license_family: MIT + purls: + - pkg:pypi/argon2-cffi?source=hash-mapping + size: 18715 + timestamp: 1749017288144 +- conda: https://conda.anaconda.org/conda-forge/linux-64/argon2-cffi-bindings-25.1.0-py310h7c4b9e2_2.conda + sha256: 5396242c40688b33b57d8564025569598ab4848fd03852bb7415443b9f421fa1 + md5: 7f9a178be0c687e77f7248507737d15e + depends: + - __glibc >=2.17,<3.0.a0 + - cffi >=1.0.1 + - libgcc >=14 + - python >=3.10,<3.11.0a0 + - python_abi 3.10.* *_cp310 + license: MIT + 
license_family: MIT + purls: + - pkg:pypi/argon2-cffi-bindings?source=hash-mapping + size: 35370 + timestamp: 1762509501470 +- conda: https://conda.anaconda.org/conda-forge/noarch/asciitree-0.3.3-py_2.tar.bz2 + sha256: b3e9369529fe7d721b66f18680ff4b561e20dbf6507e209e1f60eac277c97560 + md5: c0481c9de49f040272556e2cedf42816 + depends: + - python + license: MIT + license_family: MIT + purls: + - pkg:pypi/asciitree?source=hash-mapping + size: 6164 + timestamp: 1531050741142 +- conda: https://conda.anaconda.org/conda-forge/noarch/asgi-correlation-id-4.3.3-pyhd8ed1ab_0.conda + sha256: dfb3c7cfa5c2704ca0bfc3259f06fce3c722e1bcfcb13174149e65c6c8fabdec + md5: 750ade3651ec3b17658b01c5671fec94 + depends: + - python >=3.7,<4.0 + - starlette >=0.18 + license: BSD-4-Clause + purls: + - pkg:pypi/asgi-correlation-id?source=hash-mapping + size: 19693 + timestamp: 1725901418784 +- conda: https://conda.anaconda.org/conda-forge/noarch/asteval-1.0.8-pyhd8ed1ab_0.conda + sha256: bf452a859eeb37a583f87e7f8eaab9a5104faf1c8322bc8ad5905165bb5b600d + md5: 361b12fb5a595f025ab0289c715a56bd + depends: + - numpy >=1.22 + - pip + - python >=3.10 + - setuptools + - setuptools-scm + license: MIT + license_family: MIT + purls: + - pkg:pypi/asteval?source=hash-mapping + size: 27759 + timestamp: 1766036175098 +- conda: https://conda.anaconda.org/conda-forge/linux-64/astropy-6.1.7-py310hf462985_0.conda + sha256: 9cafabb1f950055717abda76db080e40743794f39c924a137e44fce97bb8e14b + md5: b1b72b1c8205f2dba8c976bdf4b9fd14 + depends: + - __glibc >=2.17,<3.0.a0 + - astropy-iers-data >=0.2024.10.28.0.34.7 + - importlib-metadata + - libgcc >=13 + - numpy >=1.19,<3 + - numpy >=1.23 + - packaging >=19.0 + - pyerfa >=2.0.1.1 + - python >=3.10,<3.11.0a0 + - python_abi 3.10.* *_cp310 + - pyyaml >=3.13 + license: BSD-3-Clause + license_family: BSD + purls: + - pkg:pypi/astropy?source=hash-mapping + size: 7693872 + timestamp: 1732730360427 +- conda: 
https://conda.anaconda.org/conda-forge/noarch/astropy-base-6.1.7-he5c6ecd_0.conda + sha256: c4b11c13a967587f12a19d1a6e37cac05f523a23a3496959fc1229c1868b7f0c + md5: d6b2ca3fe446339084bf322c70a21964 + depends: + - astropy 6.1.7 + license: BSD-3-Clause + license_family: BSD + purls: [] + size: 7196 + timestamp: 1746713741098 +- conda: https://conda.anaconda.org/conda-forge/noarch/astropy-iers-data-0.2026.4.13.0.58.2-pyhd8ed1ab_0.conda + sha256: c9770bab1fd1d0ed07fefe4d06fc008e7c75a95935a92d8abf15a5a3f103833c + md5: 549b0846ae548c64fc3498f02052a875 + depends: + - python >=3.10 + license: BSD-3-Clause + purls: + - pkg:pypi/astropy-iers-data?source=hash-mapping + size: 1250972 + timestamp: 1776056345471 +- conda: https://conda.anaconda.org/conda-forge/noarch/asttokens-3.0.1-pyhd8ed1ab_0.conda + sha256: ee4da0f3fe9d59439798ee399ef3e482791e48784873d546e706d0935f9ff010 + md5: 9673a61a297b00016442e022d689faa6 + depends: + - python >=3.10 + constrains: + - astroid >=2,<5 + license: Apache-2.0 + license_family: Apache + purls: + - pkg:pypi/asttokens?source=hash-mapping + size: 28797 + timestamp: 1763410017955 +- conda: https://conda.anaconda.org/conda-forge/noarch/async-timeout-5.0.1-pyhcf101f3_2.conda + sha256: 6638b68ab2675d0bed1f73562a4e75a61863b903be1538282cddb56c8e8f75bd + md5: 0d0ef7e4a0996b2c4ac2175a12b3bf69 + depends: + - python >=3.10 + - python + license: Apache-2.0 + license_family: APACHE + purls: + - pkg:pypi/async-timeout?source=hash-mapping + size: 13559 + timestamp: 1767290444597 +- conda: https://conda.anaconda.org/conda-forge/linux-64/asyncpg-0.31.0-py310h139afa4_1.conda + sha256: 32894a98cd859b8e013b631b1edbd1dff4251ae332ec854a315cc9572e8f040c + md5: 9b2d3f8d89ee7f6a5eab40305ad18ad4 + depends: + - python + - async-timeout >=4.0.3 + - libgcc >=14 + - __glibc >=2.17,<3.0.a0 + - python_abi 3.10.* *_cp310 + license: Apache-2.0 + license_family: APACHE + purls: + - pkg:pypi/asyncpg?source=hash-mapping + size: 679565 + timestamp: 1768733534657 +- conda: 
https://conda.anaconda.org/conda-forge/noarch/attrs-26.1.0-pyhcf101f3_0.conda + sha256: 1b6124230bb4e571b1b9401537ecff575b7b109cc3a21ee019f65e083b8399ab + md5: c6b0543676ecb1fb2d7643941fe375f2 + depends: + - python >=3.10 + - python + license: MIT + license_family: MIT + purls: + - pkg:pypi/attrs?source=compressed-mapping + size: 64927 + timestamp: 1773935801332 +- conda: https://conda.anaconda.org/conda-forge/noarch/awkward-2.9.0-pyhcf101f3_0.conda + sha256: e92d541cc405fec08b46cb35c36eedf9dc04df7bec5e6aaf0e1ea8f11b503d83 + md5: 93fcf05b2824e06745272380bc7a7dfc + depends: + - python >=3.10 + - awkward-cpp ==52 + - importlib-metadata >=4.13.0 + - numpy >=1.18.0 + - packaging + - typing_extensions >=4.1.0 + - fsspec >=2022.11.0 + - python + license: BSD-3-Clause + license_family: BSD + purls: + - pkg:pypi/awkward?source=hash-mapping + size: 471272 + timestamp: 1770661007420 +- conda: https://conda.anaconda.org/conda-forge/linux-64/awkward-cpp-52-py310h7f712d7_0.conda + sha256: 98aec1732d9a0fd7f34558cfb0336c64895360e093e3d72a315c3e37d6411ad4 + md5: 5721a9a90c1c1297d08e353f9e1e9655 + depends: + - python + - numpy >=1.21.3 + - libstdcxx >=14 + - libgcc >=14 + - _x86_64-microarch-level >=1 + - __glibc >=2.17,<3.0.a0 + - python_abi 3.10.* *_cp310 + license: BSD-3-Clause + license_family: BSD + purls: + - pkg:pypi/awkward-cpp?source=hash-mapping + size: 611645 + timestamp: 1770651918678 +- conda: https://conda.anaconda.org/conda-forge/linux-64/aws-c-auth-0.9.1-h194c533_5.conda + sha256: 7dcbb1eb07158274d7f71377c574bb5f3d2868574f6dcdfcaab4d617deb9f52f + md5: bf0d77362aad67108ea0ace5985807e3 + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=14 + - aws-c-sdkutils >=0.2.4,<0.2.5.0a0 + - aws-c-common >=0.12.5,<0.12.6.0a0 + - aws-c-http >=0.10.7,<0.10.8.0a0 + - aws-c-io >=0.23.2,<0.23.3.0a0 + - aws-c-cal >=0.9.8,<0.9.9.0a0 + license: Apache-2.0 + license_family: APACHE + purls: [] + size: 122969 + timestamp: 1762200199500 +- conda: 
https://conda.anaconda.org/conda-forge/osx-arm64/aws-c-auth-0.9.1-h753d554_5.conda + sha256: 42090a345f70ded10039bb1fc7817c8b45dbdeb2bfb0f0529b4c581096e9a014 + md5: 4d72b29e183b119a6cd1e38a27ce6686 + depends: + - __osx >=11.0 + - aws-c-cal >=0.9.8,<0.9.9.0a0 + - aws-c-sdkutils >=0.2.4,<0.2.5.0a0 + - aws-c-http >=0.10.7,<0.10.8.0a0 + - aws-c-io >=0.23.2,<0.23.3.0a0 + - aws-c-common >=0.12.5,<0.12.6.0a0 + license: Apache-2.0 + license_family: APACHE + purls: [] + size: 106611 + timestamp: 1762200250699 +- conda: https://conda.anaconda.org/conda-forge/linux-64/aws-c-cal-0.9.8-h346e085_0.conda + sha256: a2f13bb3da7534a18fb05e5d5de13d3d02fd468aeba0be28147797ef0a1ffce8 + md5: 170690366791b506d48f69f6f0e01bad + depends: + - __glibc >=2.17,<3.0.a0 + - aws-c-common >=0.12.5,<0.12.6.0a0 + - libgcc >=14 + - openssl >=3.5.4,<4.0a0 + license: Apache-2.0 + license_family: Apache + purls: [] + size: 55819 + timestamp: 1761947323959 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/aws-c-cal-0.9.8-hca30140_0.conda + sha256: 0a9917eec3902399236659945c0a4bd23650db5e980534675e81a3ff3f40ff45 + md5: 9915036c8270a5dfc1ffb40585987eab + depends: + - __osx >=11.0 + - aws-c-common >=0.12.5,<0.12.6.0a0 + license: Apache-2.0 + license_family: Apache + purls: [] + size: 44807 + timestamp: 1761947474954 +- conda: https://conda.anaconda.org/conda-forge/linux-64/aws-c-common-0.12.5-hb03c661_1.conda + sha256: f5876cc9792346ecdb0326f16f38b2f2fd7b5501228c56419330338fcf37e676 + md5: f1d45413e1c41a7eff162bf702c02cea + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=14 + license: Apache-2.0 + license_family: Apache + purls: [] + size: 238560 + timestamp: 1762858460824 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/aws-c-common-0.12.5-hc919400_1.conda + sha256: 48577d647f5e9e7fec531b152e3e31f7845ba81ae2e59529a97eac57adb427ae + md5: 7338b3d3f6308f375c94370728df10fc + depends: + - __osx >=11.0 + license: Apache-2.0 + license_family: Apache + purls: [] + size: 223540 + timestamp: 
1762858953852 +- conda: https://conda.anaconda.org/conda-forge/linux-64/aws-c-compression-0.3.1-h7e655bb_8.conda + sha256: e91d2fc0fddf069b8d39c0ce03eca834673702f7e17eda8e7ffc4558b948053d + md5: 1baf55dfcc138d98d437309e9aba2635 + depends: + - libgcc >=14 + - __glibc >=2.17,<3.0.a0 + - aws-c-common >=0.12.5,<0.12.6.0a0 + license: Apache-2.0 + license_family: APACHE + purls: [] + size: 22138 + timestamp: 1762957433991 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/aws-c-compression-0.3.1-h61d5560_8.conda + sha256: c42c905ea099ddc93f1d517755fb740cc26514ca4e500f697241d04980fda03d + md5: ea7a505949c1bf4a51b2cccc89f8120d + depends: + - __osx >=11.0 + - aws-c-common >=0.12.5,<0.12.6.0a0 + license: Apache-2.0 + license_family: APACHE + purls: [] + size: 21066 + timestamp: 1762957452685 +- conda: https://conda.anaconda.org/conda-forge/linux-64/aws-c-event-stream-0.5.6-h1deb5b9_4.conda + sha256: ed5131ac1f3f380b2a9f2035a4947e6d96e110bc3d4e24ca12f620e2e861fb07 + md5: 61939d0173b83ed26953e30b5cb37322 + depends: + - __glibc >=2.17,<3.0.a0 + - libstdcxx >=14 + - libgcc >=14 + - aws-c-common >=0.12.5,<0.12.6.0a0 + - aws-checksums >=0.2.7,<0.2.8.0a0 + - aws-c-io >=0.23.2,<0.23.3.0a0 + license: Apache-2.0 + license_family: APACHE + purls: [] + size: 58937 + timestamp: 1761592570359 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/aws-c-event-stream-0.5.6-hada8b3e_4.conda + sha256: 0bb2185a639ff34d96a70ba687e2c39c9b81e842b85d8817aca63e14e00f70ef + md5: b856c74bc570d617b6550f10bfe03533 + depends: + - __osx >=11.0 + - libcxx >=19 + - aws-c-common >=0.12.5,<0.12.6.0a0 + - aws-checksums >=0.2.7,<0.2.8.0a0 + - aws-c-io >=0.23.2,<0.23.3.0a0 + license: Apache-2.0 + license_family: APACHE + purls: [] + size: 51935 + timestamp: 1761592632928 +- conda: https://conda.anaconda.org/conda-forge/linux-64/aws-c-http-0.10.7-had4b759_1.conda + sha256: 6bb3cb03fddb0010a7e35b2616c34c08cbc601cbe9eebdeaabf47a84b3c2087b + md5: 11b26a1eb8183c11140ca369120bd0c0 + depends: + - libgcc 
>=14 + - __glibc >=2.17,<3.0.a0 + - aws-c-common >=0.12.5,<0.12.6.0a0 + - aws-c-io >=0.23.2,<0.23.3.0a0 + - aws-c-compression >=0.3.1,<0.3.2.0a0 + - aws-c-cal >=0.9.8,<0.9.9.0a0 + license: Apache-2.0 + license_family: APACHE + purls: [] + size: 224431 + timestamp: 1762195010218 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/aws-c-http-0.10.7-h241dc44_1.conda + sha256: bf8cc02c600d60d4d8d170eba11350211b2faaae9459c0d1c623e4a18ea79169 + md5: 1ba37dd519ce567ce4862f7040d024d5 + depends: + - __osx >=11.0 + - aws-c-compression >=0.3.1,<0.3.2.0a0 + - aws-c-common >=0.12.5,<0.12.6.0a0 + - aws-c-io >=0.23.2,<0.23.3.0a0 + - aws-c-cal >=0.9.8,<0.9.9.0a0 + license: Apache-2.0 + license_family: APACHE + purls: [] + size: 170787 + timestamp: 1762195030972 +- conda: https://conda.anaconda.org/conda-forge/linux-64/aws-c-io-0.23.2-hbff472d_2.conda + sha256: b2df226d99bd4a48228f0cc8acebef6e3912c85709053dacb05136e56cdf8b63 + md5: 4db56ebbdc330e40dbb38e0bd9fb4cad + depends: + - libgcc >=14 + - __glibc >=2.17,<3.0.a0 + - aws-c-common >=0.12.5,<0.12.6.0a0 + - s2n >=1.6.0,<1.6.1.0a0 + - aws-c-cal >=0.9.8,<0.9.9.0a0 + license: Apache-2.0 + license_family: APACHE + purls: [] + size: 181061 + timestamp: 1762187768170 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/aws-c-io-0.23.2-hcea795d_2.conda + sha256: e27c4e1b9a741e5fb41395c3116e2b4543b46db0af69e7de721de1d709152eb0 + md5: b33513f122da8f6f4606bbef8b8b084c + depends: + - __osx >=11.0 + - aws-c-cal >=0.9.8,<0.9.9.0a0 + - aws-c-common >=0.12.5,<0.12.6.0a0 + license: Apache-2.0 + license_family: APACHE + purls: [] + size: 176080 + timestamp: 1762187854052 +- conda: https://conda.anaconda.org/conda-forge/linux-64/aws-c-mqtt-0.13.3-h8ba2272_8.conda + sha256: 4c745a09b136f6cb3a012cfafd09a98386536dc80f8121d555d990e88481489f + md5: bc8b3533526bf68ed5e6114f63f781ba + depends: + - libgcc >=14 + - __glibc >=2.17,<3.0.a0 + - aws-c-common >=0.12.5,<0.12.6.0a0 + - aws-c-io >=0.23.2,<0.23.3.0a0 + - aws-c-http >=0.10.7,<0.10.8.0a0 
+ license: Apache-2.0 + license_family: APACHE + purls: [] + size: 216101 + timestamp: 1762200731083 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/aws-c-mqtt-0.13.3-hf26a141_8.conda + sha256: 61ed17e68e143de4eddceecac0f244439268a3762538730ff24a5d6d18854294 + md5: 86e356901766b717e02985de6f8c71e6 + depends: + - __osx >=11.0 + - aws-c-http >=0.10.7,<0.10.8.0a0 + - aws-c-common >=0.12.5,<0.12.6.0a0 + - aws-c-io >=0.23.2,<0.23.3.0a0 + license: Apache-2.0 + license_family: APACHE + purls: [] + size: 150212 + timestamp: 1762200774307 +- conda: https://conda.anaconda.org/conda-forge/linux-64/aws-c-s3-0.8.6-h493c25d_7.conda + sha256: eb123f66ad3697be4852c7abdb394a33997aba3d896eb1c6d557e1e42b252c8d + md5: 04f44f1cbc96b225f41852c188f5e8d9 + depends: + - libgcc >=14 + - __glibc >=2.17,<3.0.a0 + - aws-c-auth >=0.9.1,<0.9.2.0a0 + - aws-c-io >=0.23.2,<0.23.3.0a0 + - aws-c-common >=0.12.5,<0.12.6.0a0 + - aws-checksums >=0.2.7,<0.2.8.0a0 + - openssl >=3.5.4,<4.0a0 + - aws-c-cal >=0.9.8,<0.9.9.0a0 + - aws-c-http >=0.10.7,<0.10.8.0a0 + license: Apache-2.0 + license_family: APACHE + purls: [] + size: 137511 + timestamp: 1762249980464 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/aws-c-s3-0.8.6-h1e3b5a0_7.conda + sha256: 55f3e5d7ecfd50d4d9d6e0de641b571c7938e048ed7412a353befef861893e35 + md5: ae4d69d38b4806646b1998ca6923a68f + depends: + - __osx >=11.0 + - aws-c-common >=0.12.5,<0.12.6.0a0 + - aws-c-cal >=0.9.8,<0.9.9.0a0 + - aws-c-auth >=0.9.1,<0.9.2.0a0 + - aws-checksums >=0.2.7,<0.2.8.0a0 + - aws-c-http >=0.10.7,<0.10.8.0a0 + - aws-c-io >=0.23.2,<0.23.3.0a0 + license: Apache-2.0 + license_family: APACHE + purls: [] + size: 117757 + timestamp: 1762250031879 +- conda: https://conda.anaconda.org/conda-forge/linux-64/aws-c-sdkutils-0.2.4-h7e655bb_3.conda + sha256: 8d84039ea1d33021623916edfc23f063a5bcef90e8f63ae7389e1435deb83e53 + md5: 70e83d2429b7edb595355316927dfbea + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=14 + - aws-c-common >=0.12.5,<0.12.6.0a0 + 
license: Apache-2.0 + license_family: APACHE + purls: [] + size: 59204 + timestamp: 1762957305800 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/aws-c-sdkutils-0.2.4-h61d5560_3.conda + sha256: 5f93a440eae67085fc36c45d9169635569e71a487a8b359799281c1635befa68 + md5: 2781d442c010c31abcad68703ebbc205 + depends: + - __osx >=11.0 + - aws-c-common >=0.12.5,<0.12.6.0a0 + license: Apache-2.0 + license_family: APACHE + purls: [] + size: 53172 + timestamp: 1762957351489 +- conda: https://conda.anaconda.org/conda-forge/linux-64/aws-checksums-0.2.7-h7e655bb_4.conda + sha256: a95b3cc8e3c0ddb664bbd26333b35986fd406f02c2c60d380833751d2d9393bd + md5: 83a6e0fc73a7f18a8024fc89455da81c + depends: + - libgcc >=14 + - __glibc >=2.17,<3.0.a0 + - aws-c-common >=0.12.5,<0.12.6.0a0 + license: Apache-2.0 + license_family: APACHE + purls: [] + size: 76774 + timestamp: 1762957236884 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/aws-checksums-0.2.7-h61d5560_4.conda + sha256: 90b1705b8f5e42981d6dd9470218dc8994f08aa7d8ed3787dcbf5a168837d179 + md5: 4fca5f39d47042f0cb0542e0c1420875 + depends: + - __osx >=11.0 + - aws-c-common >=0.12.5,<0.12.6.0a0 + license: Apache-2.0 + license_family: APACHE + purls: [] + size: 74065 + timestamp: 1762957260262 +- conda: https://conda.anaconda.org/conda-forge/linux-64/aws-crt-cpp-0.35.0-h719b17a_2.conda + sha256: d34850625fdcbaeda37fd3b83446b71e925463ec79d15ba430636c19526116ed + md5: 2e313660820653ba7557ccbe235b402a + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=14 + - libstdcxx >=14 + - libgcc >=14 + - aws-c-io >=0.23.2,<0.23.3.0a0 + - aws-c-cal >=0.9.8,<0.9.9.0a0 + - aws-c-mqtt >=0.13.3,<0.13.4.0a0 + - aws-c-sdkutils >=0.2.4,<0.2.5.0a0 + - aws-c-event-stream >=0.5.6,<0.5.7.0a0 + - aws-c-s3 >=0.8.6,<0.8.7.0a0 + - aws-c-http >=0.10.7,<0.10.8.0a0 + - aws-c-auth >=0.9.1,<0.9.2.0a0 + - aws-c-common >=0.12.5,<0.12.6.0a0 + license: Apache-2.0 + license_family: APACHE + purls: [] + size: 408300 + timestamp: 1762256210769 +- conda: 
https://conda.anaconda.org/conda-forge/osx-arm64/aws-crt-cpp-0.35.0-h44e95eb_2.conda + sha256: bc224e776a82e4abaded096d97df672b37911c4d7e783080051b043ef402c84e + md5: e9b0347882768ccef4aa4c103e3daa67 + depends: + - libcxx >=19 + - __osx >=11.0 + - aws-c-common >=0.12.5,<0.12.6.0a0 + - aws-c-event-stream >=0.5.6,<0.5.7.0a0 + - aws-c-cal >=0.9.8,<0.9.9.0a0 + - aws-c-http >=0.10.7,<0.10.8.0a0 + - aws-c-auth >=0.9.1,<0.9.2.0a0 + - aws-c-s3 >=0.8.6,<0.8.7.0a0 + - aws-c-io >=0.23.2,<0.23.3.0a0 + - aws-c-sdkutils >=0.2.4,<0.2.5.0a0 + - aws-c-mqtt >=0.13.3,<0.13.4.0a0 + license: Apache-2.0 + license_family: APACHE + purls: [] + size: 265495 + timestamp: 1762256230196 +- conda: https://conda.anaconda.org/conda-forge/linux-64/aws-sdk-cpp-1.11.606-h522d481_6.conda + sha256: 36492a2aa10ec63a1f550aa4089b6c9876deced6b3ff4d63689b54f57c545340 + md5: 87a2b0b9822db0ec8bec1c280a8a4443 + depends: + - libgcc >=14 + - __glibc >=2.17,<3.0.a0 + - libstdcxx >=14 + - libgcc >=14 + - aws-c-common >=0.12.5,<0.12.6.0a0 + - libzlib >=1.3.1,<2.0a0 + - aws-crt-cpp >=0.35.0,<0.35.1.0a0 + - libcurl >=8.17.0,<9.0a0 + - aws-c-event-stream >=0.5.6,<0.5.7.0a0 + license: Apache-2.0 + license_family: APACHE + purls: [] + size: 3473279 + timestamp: 1762368204170 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/aws-sdk-cpp-1.11.606-hf3ce6b4_6.conda + sha256: df48e0254af0efd927e3af5d0ef3c0b9dc6e9dedbf3bcb2f69a2bc61508de472 + md5: 530f0411c283dc014ead36af4542d182 + depends: + - __osx >=11.0 + - libcxx >=19 + - aws-crt-cpp >=0.35.0,<0.35.1.0a0 + - aws-c-common >=0.12.5,<0.12.6.0a0 + - libcurl >=8.17.0,<9.0a0 + - libzlib >=1.3.1,<2.0a0 + - aws-c-event-stream >=0.5.6,<0.5.7.0a0 + license: Apache-2.0 + license_family: APACHE + purls: [] + size: 3121535 + timestamp: 1762368276514 +- conda: https://conda.anaconda.org/conda-forge/linux-64/azure-core-cpp-1.16.1-h3a458e0_0.conda + sha256: cba633571e7368953520a4f66dc74c3942cc12f735e0afa8d3d5fc3edf35c866 + md5: 1d4e0d37da5f3c22ecd44033f673feba + depends: + - 
__glibc >=2.17,<3.0.a0 + - libcurl >=8.14.1,<9.0a0 + - libgcc >=14 + - libstdcxx >=14 + - openssl >=3.5.4,<4.0a0 + license: MIT + license_family: MIT + purls: [] + size: 348231 + timestamp: 1760926677260 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/azure-core-cpp-1.16.1-h88fedcc_0.conda + sha256: d995413e4daf19ee3120f3ab9f0c9e330771787f33cbd4a33d8e5445f52022e3 + md5: fbe485a39b05090c0b5f8bb4febcd343 + depends: + - __osx >=11.0 + - libcurl >=8.14.1,<9.0a0 + - libcxx >=19 + - openssl >=3.5.4,<4.0a0 + license: MIT + license_family: MIT + purls: [] + size: 289984 + timestamp: 1760927117177 +- conda: https://conda.anaconda.org/conda-forge/linux-64/azure-identity-cpp-1.13.2-h3a5f585_1.conda + sha256: fc1df5ea2595f4f16d0da9f7713ce5fed20cb1bfc7fb098eda7925c7d23f0c45 + md5: 4e921d9c85e6559c60215497978b3cdb + depends: + - __glibc >=2.17,<3.0.a0 + - azure-core-cpp >=1.16.1,<1.16.2.0a0 + - libgcc >=14 + - libstdcxx >=14 + - openssl >=3.5.4,<4.0a0 + license: MIT + license_family: MIT + purls: [] + size: 249684 + timestamp: 1761066654684 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/azure-identity-cpp-1.13.2-h853621b_1.conda + sha256: a4ed52062025035d9c1b3d8c70af39496fc5153cc741420139a770bc1312cfd6 + md5: fac63edc393d7035ab23fbccdeda34f4 + depends: + - __osx >=11.0 + - azure-core-cpp >=1.16.1,<1.16.2.0a0 + - libcxx >=19 + - openssl >=3.5.4,<4.0a0 + license: MIT + license_family: MIT + purls: [] + size: 167268 + timestamp: 1761066827371 +- conda: https://conda.anaconda.org/conda-forge/linux-64/azure-storage-blobs-cpp-12.15.0-h2a74896_1.conda + sha256: 58879f33cd62c30a4d6a19fd5ebc59bd0c4560f575bd02645d93d342b6f881d2 + md5: ffd553ff98ce5d74d3d89ac269153149 + depends: + - __glibc >=2.17,<3.0.a0 + - azure-core-cpp >=1.16.1,<1.16.2.0a0 + - azure-storage-common-cpp >=12.11.0,<12.11.1.0a0 + - libgcc >=14 + - libstdcxx >=14 + license: MIT + license_family: MIT + purls: [] + size: 576406 + timestamp: 1761080005291 +- conda: 
https://conda.anaconda.org/conda-forge/osx-arm64/azure-storage-blobs-cpp-12.15.0-h10d327b_1.conda + sha256: 274267b458ed51f4b71113fe615121fabd6f1d7b62ebfefdad946f8436a5db8e + md5: 443b74cf38c6b0f4b675c0517879ce69 + depends: + - __osx >=11.0 + - azure-core-cpp >=1.16.1,<1.16.2.0a0 + - azure-storage-common-cpp >=12.11.0,<12.11.1.0a0 + - libcxx >=19 + license: MIT + license_family: MIT + purls: [] + size: 425175 + timestamp: 1761080947110 +- conda: https://conda.anaconda.org/conda-forge/linux-64/azure-storage-common-cpp-12.11.0-h3d7a050_1.conda + sha256: eb590e5c47ee8e6f8cc77e9c759da860ae243eed56aceb67ce51db75f45c9a50 + md5: 89985ba2a3742f34be6aafd6a8f3af8c + depends: + - __glibc >=2.17,<3.0.a0 + - azure-core-cpp >=1.16.1,<1.16.2.0a0 + - libgcc >=14 + - libstdcxx >=14 + - libxml2 + - libxml2-16 >=2.14.6 + - openssl >=3.5.4,<4.0a0 + license: MIT + license_family: MIT + purls: [] + size: 149620 + timestamp: 1761066643066 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/azure-storage-common-cpp-12.11.0-h7e4aa5d_1.conda + sha256: 74803bd26983b599ea54ff1267a0c857ff37ccf6f849604a72eb63d8d30e4425 + md5: ac9113ea0b7ed5ecf452503f82bf2956 + depends: + - __osx >=11.0 + - azure-core-cpp >=1.16.1,<1.16.2.0a0 + - libcxx >=19 + - libxml2 + - libxml2-16 >=2.14.6 + - openssl >=3.5.4,<4.0a0 + license: MIT + license_family: MIT + purls: [] + size: 121744 + timestamp: 1761066874537 +- conda: https://conda.anaconda.org/conda-forge/linux-64/azure-storage-files-datalake-cpp-12.13.0-hf38f1be_1.conda + sha256: 9f3d0f484e97cef5f019b7faef0c07fb7ee6c584e3a6e2954980f440978a365e + md5: f10b9303c7239fbce3580a60a92bcf97 + depends: + - __glibc >=2.17,<3.0.a0 + - azure-core-cpp >=1.16.1,<1.16.2.0a0 + - azure-storage-blobs-cpp >=12.15.0,<12.15.1.0a0 + - azure-storage-common-cpp >=12.11.0,<12.11.1.0a0 + - libgcc >=14 + - libstdcxx >=14 + license: MIT + license_family: MIT + purls: [] + size: 299198 + timestamp: 1761094654852 +- conda: 
https://conda.anaconda.org/conda-forge/osx-arm64/azure-storage-files-datalake-cpp-12.13.0-hb288d13_1.conda + sha256: 2205e24d587453a04b075f86c59e3e72ad524c447fc5be61d7d1beb3cf2d7661 + md5: 595091ae43974e5059d6eabf0a6a7aa5 + depends: + - __osx >=11.0 + - azure-core-cpp >=1.16.1,<1.16.2.0a0 + - azure-storage-blobs-cpp >=12.15.0,<12.15.1.0a0 + - azure-storage-common-cpp >=12.11.0,<12.11.1.0a0 + - libcxx >=19 + license: MIT + license_family: MIT + purls: [] + size: 197152 + timestamp: 1761094913245 +- conda: https://conda.anaconda.org/conda-forge/noarch/babel-2.18.0-pyhcf101f3_1.conda + sha256: a14a9ad02101aab25570543a59c5193043b73dc311a25650134ed9e6cb691770 + md5: f1976ce927373500cc19d3c0b2c85177 + depends: + - python >=3.10 + - python + constrains: + - pytz >=2015.7 + license: BSD-3-Clause + license_family: BSD + purls: + - pkg:pypi/babel?source=compressed-mapping + size: 7684321 + timestamp: 1772555330347 +- conda: https://conda.anaconda.org/conda-forge/noarch/backports-1.0-pyhd8ed1ab_5.conda + sha256: e1c3dc8b5aa6e12145423fed262b4754d70fec601339896b9ccf483178f690a6 + md5: 767d508c1a67e02ae8f50e44cacfadb2 + depends: + - python >=3.9 + license: BSD-3-Clause + license_family: BSD + purls: [] + size: 7069 + timestamp: 1733218168786 +- conda: https://conda.anaconda.org/conda-forge/noarch/backports.tarfile-1.2.0-pyhcf101f3_2.conda + sha256: 25abdb37e186f0d6ac3b774a63c81c5bc4bf554b5096b51343fa5e7c381193b1 + md5: bea46844deb274b2cc2a3a941745fa73 + depends: + - python >=3.10 + - backports + - python + license: MIT + license_family: MIT + purls: + - pkg:pypi/backports-tarfile?source=hash-mapping + size: 35739 + timestamp: 1767290467820 +- conda: https://conda.anaconda.org/conda-forge/noarch/black-26.3.1-pyha5154f8_0.conda + sha256: d89173ccd424b6f322e2e0bfddd0eb00288710d08891fc5f9459aa70be662abe + md5: fe0e363af66f3f3ac3c7d2c0df15a75f + depends: + - click >=8.0.0 + - mypy_extensions >=0.4.3 + - packaging >=22.0 + - pathspec >=0.9 + - platformdirs >=2 + - python >=3.10,<3.11 
+ - pytokens >=0.4 + - tomli >=1.1.0 + - typing_extensions >=4.0.1 + license: MIT + license_family: MIT + purls: + - pkg:pypi/black?source=hash-mapping + size: 172014 + timestamp: 1773315341222 +- conda: https://conda.anaconda.org/conda-forge/linux-64/blosc-1.21.6-he440d0b_1.conda + sha256: e7af5d1183b06a206192ff440e08db1c4e8b2ca1f8376ee45fb2f3a85d4ee45d + md5: 2c2fae981fd2afd00812c92ac47d023d + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=13 + - libstdcxx >=13 + - libzlib >=1.3.1,<2.0a0 + - lz4-c >=1.10.0,<1.11.0a0 + - snappy >=1.2.1,<1.3.0a0 + - zstd >=1.5.6,<1.6.0a0 + license: BSD-3-Clause + license_family: BSD + purls: [] + size: 48427 + timestamp: 1733513201413 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/blosc-1.21.6-h7dd00d9_1.conda + sha256: c3fe902114b9a3ac837e1a32408cc2142c147ec054c1038d37aec6814343f48a + md5: 925acfb50a750aa178f7a0aced77f351 + depends: + - __osx >=11.0 + - libcxx >=18 + - libzlib >=1.3.1,<2.0a0 + - lz4-c >=1.10.0,<1.11.0a0 + - snappy >=1.2.1,<1.3.0a0 + - zstd >=1.5.6,<1.6.0a0 + license: BSD-3-Clause + license_family: BSD + purls: [] + size: 33602 + timestamp: 1733513285902 +- conda: https://conda.anaconda.org/conda-forge/noarch/bluesky-tiled-plugins-2.0.2-pyhd8ed1ab_0.conda + sha256: 75bab8ba15fd3898ff61a1a0fc1f4b73d1a29e93b44d7bfed49b6b1e6e318c40 + md5: e7c9fcd641142d8f109cf321812a156e + depends: + - dask-core + - event-model + - mongoquery + - python >=3.10 + - pytz + - tiled-client >=0.2.0 + - tzlocal + license: BSD-3-Clause + license_family: BSD + purls: + - pkg:pypi/bluesky-tiled-plugins?source=hash-mapping + size: 50277 + timestamp: 1771046732966 +- conda: https://conda.anaconda.org/conda-forge/noarch/bokeh-3.9.0-pyhd8ed1ab_0.conda + sha256: 96a6486d4fe27c02c1092a40096dfd82043929b3a7da156a49b28d851159c551 + md5: b9a6da57e94cd12bd71e7ab0713ef052 + depends: + - contourpy >=1.2 + - jinja2 >=2.9 + - narwhals >=1.13 + - numpy >=1.16 + - packaging >=16.8 + - pillow >=7.1.0 + - python >=3.10 + - pyyaml >=3.10 + - tornado 
>=6.2 + - xyzservices >=2021.09.1 + license: BSD-3-Clause + license_family: BSD + purls: + - pkg:pypi/bokeh?source=hash-mapping + size: 4240579 + timestamp: 1773302678722 +- conda: https://conda.anaconda.org/conda-forge/noarch/boltons-25.0.0-pyhd8ed1ab_0.conda + sha256: ea5f4c876eff2ed469551b57f1cc889a3c01128bf3e2e10b1fea11c3ef39eac2 + md5: c7eb87af73750d6fd97eff8bbee8cb9c + depends: + - python >=3.9 + license: BSD-3-Clause + license_family: BSD + purls: + - pkg:pypi/boltons?source=hash-mapping + size: 302296 + timestamp: 1749686302834 +- conda: https://conda.anaconda.org/conda-forge/linux-64/brotli-1.1.0-hb03c661_4.conda + sha256: 294526a54fa13635341729f250d0b1cf8f82cad1e6b83130304cbf3b6d8b74cc + md5: eaf3fbd2aa97c212336de38a51fe404e + depends: + - __glibc >=2.17,<3.0.a0 + - brotli-bin 1.1.0 hb03c661_4 + - libbrotlidec 1.1.0 hb03c661_4 + - libbrotlienc 1.1.0 hb03c661_4 + - libgcc >=14 + license: MIT + license_family: MIT + purls: [] + size: 19883 + timestamp: 1756599394934 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/brotli-1.1.0-h6caf38d_4.conda + sha256: 8aa8ee52b95fdc3ef09d476cbfa30df722809b16e6dca4a4f80e581012035b7b + md5: ce8659623cea44cc812bc0bfae4041c5 + depends: + - __osx >=11.0 + - brotli-bin 1.1.0 h6caf38d_4 + - libbrotlidec 1.1.0 h6caf38d_4 + - libbrotlienc 1.1.0 h6caf38d_4 + license: MIT + license_family: MIT + purls: [] + size: 20003 + timestamp: 1756599758165 +- conda: https://conda.anaconda.org/conda-forge/linux-64/brotli-bin-1.1.0-hb03c661_4.conda + sha256: 444903c6e5c553175721a16b7c7de590ef754a15c28c99afbc8a963b35269517 + md5: ca4ed8015764937c81b830f7f5b68543 + depends: + - __glibc >=2.17,<3.0.a0 + - libbrotlidec 1.1.0 hb03c661_4 + - libbrotlienc 1.1.0 hb03c661_4 + - libgcc >=14 + license: MIT + license_family: MIT + purls: [] + size: 19615 + timestamp: 1756599385418 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/brotli-bin-1.1.0-h6caf38d_4.conda + sha256: e57d402b02c9287b7c02d9947d7b7b55a4f7d73341c210c233f6b388d4641e08 + 
md5: ab57f389f304c4d2eb86d8ae46d219c3 + depends: + - __osx >=11.0 + - libbrotlidec 1.1.0 h6caf38d_4 + - libbrotlienc 1.1.0 h6caf38d_4 + license: MIT + license_family: MIT + purls: [] + size: 17373 + timestamp: 1756599741779 +- conda: https://conda.anaconda.org/conda-forge/linux-64/brotli-python-1.1.0-py310hea6c23e_4.conda + sha256: 29f24d4a937c3a7f4894d6be9d9f9604adbb5506891f0f37bbb7e2dc8fa6bc0a + md5: 6ef43db290647218e1e04c2601675bff + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=14 + - libstdcxx >=14 + - python >=3.10,<3.11.0a0 + - python_abi 3.10.* *_cp310 + constrains: + - libbrotlicommon 1.1.0 hb03c661_4 + license: MIT + license_family: MIT + purls: + - pkg:pypi/brotli?source=hash-mapping + size: 353838 + timestamp: 1756599456833 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/brotli-python-1.1.0-py310h1af2607_4.conda + sha256: 75cc1a5e99914ca5777713afe8d262e122c203ebbee0366a76338cb750534ac9 + md5: cd63cc758578ca3318f9c479be55dc30 + depends: + - __osx >=11.0 + - libcxx >=19 + - python >=3.10,<3.11.0a0 + - python >=3.10,<3.11.0a0 *_cpython + - python_abi 3.10.* *_cp310 + constrains: + - libbrotlicommon 1.1.0 h6caf38d_4 + license: MIT + license_family: MIT + purls: + - pkg:pypi/brotli?source=hash-mapping + size: 340989 + timestamp: 1756600184408 +- conda: https://conda.anaconda.org/conda-forge/linux-64/brunsli-0.1-he3183e4_1.conda + sha256: fddad9bb57ee7ec619a5cf4591151578a2501c3bf8cb3b4b066ac5b54c85a4dd + md5: 799ebfe432cb3949e246b69278ef851c + depends: + - __glibc >=2.17,<3.0.a0 + - libbrotlicommon >=1.1.0,<1.2.0a0 + - libbrotlidec >=1.1.0,<1.2.0a0 + - libbrotlienc >=1.1.0,<1.2.0a0 + - libgcc >=14 + - libstdcxx >=14 + license: MIT + license_family: MIT + purls: [] + size: 168813 + timestamp: 1757453968120 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/brunsli-0.1-h97083b6_1.conda + sha256: 3bf4ef58d2c11efe5926c5a2efc77f54f2e3905e5b3ed6ea7f129157f446a989 + md5: b36fe588d614b5dc3279e080a6925b3d + depends: + - __osx >=11.0 + - 
libbrotlicommon >=1.1.0,<1.2.0a0 + - libbrotlidec >=1.1.0,<1.2.0a0 + - libbrotlienc >=1.1.0,<1.2.0a0 + - libcxx >=19 + license: MIT + license_family: MIT + purls: [] + size: 141426 + timestamp: 1757454314055 +- conda: https://conda.anaconda.org/conda-forge/linux-64/bzip2-1.0.8-hda65f42_9.conda + sha256: 0b75d45f0bba3e95dc693336fa51f40ea28c980131fec438afb7ce6118ed05f6 + md5: d2ffd7602c02f2b316fd921d39876885 + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=14 + license: bzip2-1.0.6 + license_family: BSD + purls: [] + size: 260182 + timestamp: 1771350215188 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/bzip2-1.0.8-hd037594_9.conda + sha256: 540fe54be35fac0c17feefbdc3e29725cce05d7367ffedfaaa1bdda234b019df + md5: 620b85a3f45526a8bc4d23fd78fc22f0 + depends: + - __osx >=11.0 + license: bzip2-1.0.6 + license_family: BSD + purls: [] + size: 124834 + timestamp: 1771350416561 +- conda: https://conda.anaconda.org/conda-forge/linux-64/c-ares-1.34.6-hb03c661_0.conda + sha256: cc9accf72fa028d31c2a038460787751127317dcfa991f8d1f1babf216bb454e + md5: 920bb03579f15389b9e512095ad995b7 + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=14 + license: MIT + license_family: MIT + purls: [] + size: 207882 + timestamp: 1765214722852 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/c-ares-1.34.6-hc919400_0.conda + sha256: 2995f2aed4e53725e5efbc28199b46bf311c3cab2648fc4f10c2227d6d5fa196 + md5: bcb3cba70cf1eec964a03b4ba7775f01 + depends: + - __osx >=11.0 + license: MIT + license_family: MIT + purls: [] + size: 180327 + timestamp: 1765215064054 +- conda: https://conda.anaconda.org/conda-forge/linux-64/c-blosc2-2.15.2-h3122c55_1.conda + sha256: 6c952a8aa5507c30cd80901cce5dfacfdaf54c999cf6b3e391322ca216f2593f + md5: 2bc8d76acd818d7e79229f5157d5c156 + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=13 + - libstdcxx >=13 + - lz4-c >=1.10.0,<1.11.0a0 + - zlib-ng >=2.2.2,<2.3.0a0 + - zstd >=1.5.6,<1.6.0a0 + license: BSD-3-Clause + license_family: BSD + purls: [] + size: 
341796 + timestamp: 1733447758492 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/c-blosc2-2.19.1-h9c47b6e_0.conda + sha256: a4e7042bc5ddc6eb91e375492412fb1bd958acc4e2f3323eb675d2aafd806f6a + md5: 2de310b1ae2c0d43125de29b9be71ca9 + depends: + - __osx >=11.0 + - libcxx >=19 + - lz4-c >=1.10.0,<1.11.0a0 + - zlib-ng >=2.2.4,<2.3.0a0 + - zstd >=1.5.7,<1.6.0a0 + license: BSD-3-Clause + license_family: BSD + purls: [] + size: 253741 + timestamp: 1752777447413 +- conda: https://conda.anaconda.org/conda-forge/noarch/ca-certificates-2026.2.25-hbd8a1cb_0.conda + sha256: 67cc7101b36421c5913a1687ef1b99f85b5d6868da3abbf6ec1a4181e79782fc + md5: 4492fd26db29495f0ba23f146cd5638d + depends: + - __unix + license: ISC + purls: [] + size: 147413 + timestamp: 1772006283803 +- conda: https://conda.anaconda.org/conda-forge/noarch/cached-property-1.5.2-hd8ed1ab_1.tar.bz2 + noarch: python + sha256: 561e6660f26c35d137ee150187d89767c988413c978e1b712d53f27ddf70ea17 + md5: 9b347a7ec10940d3f7941ff6c460b551 + depends: + - cached_property >=1.5.2,<1.5.3.0a0 + license: BSD-3-Clause + license_family: BSD + purls: [] + size: 4134 + timestamp: 1615209571450 +- conda: https://conda.anaconda.org/conda-forge/noarch/cached_property-1.5.2-pyha770c72_1.tar.bz2 + sha256: 6dbf7a5070cc43d90a1e4c2ec0c541c69d8e30a0e25f50ce9f6e4a432e42c5d7 + md5: 576d629e47797577ab0f1b351297ef4a + depends: + - python >=3.6 + license: BSD-3-Clause + license_family: BSD + purls: + - pkg:pypi/cached-property?source=hash-mapping + size: 11065 + timestamp: 1615209567874 +- conda: https://conda.anaconda.org/conda-forge/noarch/cachetools-7.0.5-pyhd8ed1ab_0.conda + sha256: edfecb626da69607f926f51ad0d24942bfe9f7a29391d55d4ac62403e878605b + md5: a66a1542c3ed584ca4fdb23955d81e91 + depends: + - python >=3.10 + license: MIT + license_family: MIT + purls: + - pkg:pypi/cachetools?source=hash-mapping + size: 19034 + timestamp: 1773120473852 +- conda: https://conda.anaconda.org/conda-forge/noarch/cachey-0.2.1-pyh9f0ad1d_0.tar.bz2 + 
sha256: c542041228ec2a3b0641891fc6ce986726397a437754acb35b594e20134958a8 + md5: 8583ac4d33e58a116d0a93c5e99e0fff + depends: + - heapdict + - python + license: BSD-3-Clause + license_family: BSD + purls: + - pkg:pypi/cachey?source=hash-mapping + size: 9388 + timestamp: 1594854714059 +- conda: https://conda.anaconda.org/conda-forge/linux-64/cairo-1.18.4-h3394656_0.conda + sha256: 3bd6a391ad60e471de76c0e9db34986c4b5058587fbf2efa5a7f54645e28c2c7 + md5: 09262e66b19567aff4f592fb53b28760 + depends: + - __glibc >=2.17,<3.0.a0 + - fontconfig >=2.15.0,<3.0a0 + - fonts-conda-ecosystem + - freetype >=2.12.1,<3.0a0 + - icu >=75.1,<76.0a0 + - libexpat >=2.6.4,<3.0a0 + - libgcc >=13 + - libglib >=2.82.2,<3.0a0 + - libpng >=1.6.47,<1.7.0a0 + - libstdcxx >=13 + - libxcb >=1.17.0,<2.0a0 + - libzlib >=1.3.1,<2.0a0 + - pixman >=0.44.2,<1.0a0 + - xorg-libice >=1.1.2,<2.0a0 + - xorg-libsm >=1.2.5,<2.0a0 + - xorg-libx11 >=1.8.11,<2.0a0 + - xorg-libxext >=1.3.6,<2.0a0 + - xorg-libxrender >=0.9.12,<0.10.0a0 + license: LGPL-2.1-only or MPL-1.1 + purls: [] + size: 978114 + timestamp: 1741554591855 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/cairo-1.18.4-he0f2337_1.conda + sha256: cde9b79ee206fe3ba6ca2dc5906593fb7a1350515f85b2a1135a4ce8ec1539e3 + md5: 36200ecfbbfbcb82063c87725434161f + depends: + - __osx >=11.0 + - fontconfig >=2.15.0,<3.0a0 + - fonts-conda-ecosystem + - icu >=78.1,<79.0a0 + - libcxx >=19 + - libexpat >=2.7.3,<3.0a0 + - libfreetype >=2.14.1 + - libfreetype6 >=2.14.1 + - libglib >=2.86.3,<3.0a0 + - libpng >=1.6.53,<1.7.0a0 + - libzlib >=1.3.1,<2.0a0 + - pixman >=0.46.4,<1.0a0 + license: LGPL-2.1-only or MPL-1.1 + purls: [] + size: 900035 + timestamp: 1766416416791 +- conda: https://conda.anaconda.org/conda-forge/noarch/canonicaljson-2.0.0-pyhd8ed1ab_0.conda + sha256: 2b73c926cf83265cf394ba9ba11839b0a7008ad2af1c55f1e8002f81cb682d00 + md5: 7d027ed4883d11a8ba7b27e0dd56df47 + depends: + - python >=3.9 + license: Apache-2.0 + license_family: APACHE + purls: + - 
pkg:pypi/canonicaljson?source=hash-mapping + size: 13344 + timestamp: 1737528464034 +- conda: https://conda.anaconda.org/conda-forge/noarch/certifi-2026.2.25-pyhd8ed1ab_0.conda + sha256: a6b118fd1ed6099dc4fc03f9c492b88882a780fadaef4ed4f93dc70757713656 + md5: 765c4d97e877cdbbb88ff33152b86125 + depends: + - python >=3.10 + license: ISC + purls: + - pkg:pypi/certifi?source=compressed-mapping + size: 151445 + timestamp: 1772001170301 +- conda: https://conda.anaconda.org/conda-forge/linux-64/cffi-2.0.0-py310he7384ee_1.conda + sha256: bf76ead6d59b70f3e901476a73880ac92011be63b151972d135eec55bbbe6091 + md5: 803e2d778b8dcccdc014127ec5001681 + depends: + - __glibc >=2.17,<3.0.a0 + - libffi >=3.5.2,<3.6.0a0 + - libgcc >=14 + - pycparser + - python >=3.10,<3.11.0a0 + - python_abi 3.10.* *_cp310 + license: MIT + license_family: MIT + purls: + - pkg:pypi/cffi?source=hash-mapping + size: 244766 + timestamp: 1761203011221 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/cffi-2.0.0-py310hf5b66c1_1.conda + sha256: 9a629f09b734795127b63b4880172e243fb2539107bbdd0203f3cd638fa131e3 + md5: 4e0516a8b6f96414d867af0228237a43 + depends: + - __osx >=11.0 + - libffi >=3.5.2,<3.6.0a0 + - pycparser + - python >=3.10,<3.11.0a0 + - python >=3.10,<3.11.0a0 *_cpython + - python_abi 3.10.* *_cp310 + license: MIT + license_family: MIT + purls: + - pkg:pypi/cffi?source=hash-mapping + size: 236349 + timestamp: 1761203587122 +- conda: https://conda.anaconda.org/conda-forge/noarch/cfgv-3.5.0-pyhd8ed1ab_0.conda + sha256: aa589352e61bb221351a79e5946d56916e3c595783994884accdb3b97fe9d449 + md5: 381bd45fb7aa032691f3063aff47e3a1 + depends: + - python >=3.10 + license: MIT + license_family: MIT + purls: + - pkg:pypi/cfgv?source=hash-mapping + size: 13589 + timestamp: 1763607964133 +- conda: https://conda.anaconda.org/conda-forge/linux-64/charls-2.4.3-hecca717_0.conda + sha256: 53504e965499b4845ca3dc63d5905d5a1e686fcb9ab17e83c018efa479e787d0 + md5: 937ca49a245fcf2b88d51b6b52959426 + depends: + - __glibc 
>=2.17,<3.0.a0 + - libgcc >=14 + - libstdcxx >=14 + license: BSD-3-Clause + license_family: BSD + purls: [] + size: 161768 + timestamp: 1772712510770 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/charls-2.4.3-hf6b4638_0.conda + sha256: 1009bd6c2bb26e41dada4015793a1edf44d1320c7ca14fc646f89b0b51236e20 + md5: 91f1daf22f72792e11382938bb0dd9ac + depends: + - __osx >=11.0 + - libcxx >=19 + license: BSD-3-Clause + license_family: BSD + purls: [] + size: 118790 + timestamp: 1772712898684 +- conda: https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-3.4.7-pyhd8ed1ab_0.conda + sha256: 3f9483d62ce24ecd063f8a5a714448445dc8d9e201147c46699fc0033e824457 + md5: a9167b9571f3baa9d448faa2139d1089 + depends: + - python >=3.10 + license: MIT + license_family: MIT + purls: + - pkg:pypi/charset-normalizer?source=compressed-mapping + size: 58872 + timestamp: 1775127203018 +- pypi: git+https://github.com/NSLS2/chxtools?rev=main#04a3a27ceefa2436839e00f980be3f65aa4155e5 + name: chxtools + version: 3.0.3rc1 + requires_dist: + - databroker + - eiger-io + - matplotlib + - numpy + - pyepics + - pymongo + - scikit-beam + - scikit-image + - scipy +- conda: https://conda.anaconda.org/conda-forge/noarch/click-8.3.2-pyhc90fa1f_0.conda + sha256: 526d434cf5390310f40f34ea6ec4f0c225cdf1e419010e624d399b13b2059f0f + md5: 4d18bc3af7cfcea97bd817164672a08c + depends: + - __unix + - python + - python >=3.10 + license: BSD-3-Clause + license_family: BSD + purls: + - pkg:pypi/click?source=compressed-mapping + size: 98253 + timestamp: 1775578217828 +- conda: https://conda.anaconda.org/conda-forge/noarch/cloudpickle-3.1.2-pyhcf101f3_1.conda + sha256: 4c287c2721d8a34c94928be8fe0e9a85754e90189dd4384a31b1806856b50a67 + md5: 61b8078a0905b12529abc622406cb62c + depends: + - python >=3.10 + - python + license: BSD-3-Clause + license_family: BSD + purls: + - pkg:pypi/cloudpickle?source=hash-mapping + size: 27353 + timestamp: 1765303462831 +- conda: 
https://conda.anaconda.org/conda-forge/linux-64/cmarkgfm-2024.11.20-py310h7c4b9e2_1.conda + sha256: a8ed33fb2d2a047d32803610d1040b209741e5b6d40dbce2fad1590d61da6651 + md5: c20b82e5fd7fc1d52fe419075df60305 + depends: + - __glibc >=2.17,<3.0.a0 + - cffi >=1.0.0 + - libgcc >=14 + - python >=3.10,<3.11.0a0 + - python_abi 3.10.* *_cp310 + license: MIT + license_family: MIT + purls: + - pkg:pypi/cmarkgfm?source=hash-mapping + size: 142718 + timestamp: 1760363090021 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/cmarkgfm-2024.11.20-py310h7bdd564_1.conda + sha256: 2579d912ccff9dfa4f64a892a3b10c233da35be70e5a74896edba70c8e520d69 + md5: 644bb7a3eb90e7cab70da42d46de3e90 + depends: + - __osx >=11.0 + - cffi >=1.0.0 + - python >=3.10,<3.11.0a0 + - python >=3.10,<3.11.0a0 *_cpython + - python_abi 3.10.* *_cp310 + license: MIT + license_family: MIT + purls: + - pkg:pypi/cmarkgfm?source=hash-mapping + size: 114013 + timestamp: 1760363349344 +- conda: https://conda.anaconda.org/conda-forge/noarch/codecov-2.1.13-pyhd8ed1ab_1.conda + sha256: 51ead85d30f4eeff41c558b24ab0992a6d9d08af3e887d3ac7d2c169670b807f + md5: d924fe46139596ebc3d4d424ec39ed51 + depends: + - coverage + - python >=3.9 + - requests >=2.7.9 + license: Apache-2.0 + license_family: Apache + purls: + - pkg:pypi/codecov?source=hash-mapping + size: 21694 + timestamp: 1734975404103 +- conda: https://conda.anaconda.org/conda-forge/noarch/colorama-0.4.6-pyhd8ed1ab_1.conda + sha256: ab29d57dc70786c1269633ba3dff20288b81664d3ff8d21af995742e2bb03287 + md5: 962b9857ee8e7018c22f2776ffa0b2d7 + depends: + - python >=3.9 + license: BSD-3-Clause + license_family: BSD + purls: + - pkg:pypi/colorama?source=hash-mapping + size: 27011 + timestamp: 1733218222191 +- conda: https://conda.anaconda.org/conda-forge/linux-64/contourpy-1.3.2-py310h3788b33_0.conda + sha256: 5231c1b68e01a9bc9debabc077a6fb48c4395206d59f40a4598d1d5e353e11d8 + md5: b6420d29123c7c823de168f49ccdfe6a + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=13 + - 
libstdcxx >=13 + - numpy >=1.23 + - python >=3.10,<3.11.0a0 + - python_abi 3.10.* *_cp310 + license: BSD-3-Clause + license_family: BSD + purls: + - pkg:pypi/contourpy?source=hash-mapping + size: 261280 + timestamp: 1744743236964 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/contourpy-1.3.2-py310h7f4e7e6_0.conda + sha256: 758a7a858d8a5dca265e0754c73659690a99226e7e8d530666fece3b38e44558 + md5: 18ad60675af8d74a6e49bf40055419d0 + depends: + - __osx >=11.0 + - libcxx >=18 + - numpy >=1.23 + - python >=3.10,<3.11.0a0 + - python >=3.10,<3.11.0a0 *_cpython + - python_abi 3.10.* *_cp310 + license: BSD-3-Clause + license_family: BSD + purls: + - pkg:pypi/contourpy?source=hash-mapping + size: 231970 + timestamp: 1744743542215 +- conda: https://conda.anaconda.org/conda-forge/linux-64/coverage-7.13.5-py310h3406613_0.conda + sha256: 96cd68ef97d335b7f038470726eb0cb8523e05b822c40826c9f961d027ccd023 + md5: 85cbfbcc0c49f7fb6da6259ce0048909 + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=14 + - python >=3.10,<3.11.0a0 + - python_abi 3.10.* *_cp310 + - tomli + license: Apache-2.0 + license_family: APACHE + purls: + - pkg:pypi/coverage?source=hash-mapping + size: 313105 + timestamp: 1773760891568 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/coverage-7.13.5-py310hb46c203_0.conda + sha256: c5a09adc9fa560786f84cca9a4035732d353824a2d95e76dc6f8d90a989a1ccd + md5: d51dedbbed0ecf7033ffe1c6751b0b54 + depends: + - __osx >=11.0 + - python >=3.10,<3.11.0a0 + - python >=3.10,<3.11.0a0 *_cpython + - python_abi 3.10.* *_cp310 + - tomli + license: Apache-2.0 + license_family: APACHE + purls: + - pkg:pypi/coverage?source=hash-mapping + size: 312305 + timestamp: 1773761467865 +- conda: https://conda.anaconda.org/conda-forge/noarch/cpython-3.10.20-py310hd8ed1ab_0.conda + noarch: generic + sha256: a1c44d450ca670741396fd8ed91ce346b0f14b584b192076dda8fe9b6add22d5 + md5: 8a191f9c5f06977c752bf348c7363633 + depends: + - python >=3.10,<3.11.0a0 + - python_abi * *_cp310 + license: 
Python-2.0 + purls: [] + size: 50791 + timestamp: 1772730686755 +- conda: https://conda.anaconda.org/conda-forge/linux-64/cryptography-46.0.7-py310hb288b08_0.conda + sha256: 4b08d75510ffa2e8b1c97c7e52e5c73920941bee1075b6e28c2d717412007c95 + md5: 4b75ede0409f12819a914bcf81203e62 + depends: + - __glibc >=2.17,<3.0.a0 + - cffi >=1.14 + - libgcc >=14 + - openssl >=3.5.6,<4.0a0 + - python >=3.10,<3.11.0a0 + - python_abi 3.10.* *_cp310 + - typing_extensions >=4.13.2 + constrains: + - __glibc >=2.17 + license: Apache-2.0 AND BSD-3-Clause AND PSF-2.0 AND MIT + license_family: BSD + purls: + - pkg:pypi/cryptography?source=hash-mapping + size: 2397277 + timestamp: 1775637827435 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/cryptography-46.0.7-py310h12cab78_0.conda + sha256: ab209d740cd07ff303d3772fb4e750d7da71ac1ef5b40a01b6e5eb3525059691 + md5: 2de63db95fd81c4c6b8744e926da2081 + depends: + - __osx >=11.0 + - cffi >=1.14 + - openssl >=3.5.6,<4.0a0 + - python >=3.10,<3.11.0a0 + - python >=3.10,<3.11.0a0 *_cpython + - python_abi 3.10.* *_cp310 + - typing_extensions >=4.13.2 + constrains: + - __osx >=11.0 + license: Apache-2.0 AND BSD-3-Clause AND PSF-2.0 AND MIT + license_family: BSD + purls: + - pkg:pypi/cryptography?source=hash-mapping + size: 2255424 + timestamp: 1775638473095 +- conda: https://conda.anaconda.org/conda-forge/noarch/cycler-0.12.1-pyhcf101f3_2.conda + sha256: bb47aec5338695ff8efbddbc669064a3b10fe34ad881fb8ad5d64fbfa6910ed1 + md5: 4c2a8fef270f6c69591889b93f9f55c1 + depends: + - python >=3.10 + - python + license: BSD-3-Clause + license_family: BSD + purls: + - pkg:pypi/cycler?source=hash-mapping + size: 14778 + timestamp: 1764466758386 +- conda: https://conda.anaconda.org/conda-forge/linux-64/cyrus-sasl-2.1.28-hd9c7081_0.conda + sha256: ee09ad7610c12c7008262d713416d0b58bf365bc38584dce48950025850bdf3f + md5: cae723309a49399d2949362f4ab5c9e4 + depends: + - __glibc >=2.17,<3.0.a0 + - krb5 >=1.21.3,<1.22.0a0 + - libgcc >=13 + - libntlm >=1.8,<2.0a0 + - 
libstdcxx >=13 + - libxcrypt >=4.4.36 + - openssl >=3.5.0,<4.0a0 + license: BSD-3-Clause-Attribution + license_family: BSD + purls: [] + size: 209774 + timestamp: 1750239039316 +- conda: https://conda.anaconda.org/conda-forge/linux-64/cython-3.2.4-py310ha58568a_0.conda + sha256: cbe0b8ccc92d369e5b20ff298f56587b090348567392a42e072e1995af5e82d6 + md5: eed7be444431b44a1f5907847820f541 + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=14 + - libstdcxx >=14 + - python >=3.10,<3.11.0a0 + - python_abi 3.10.* *_cp310 + license: Apache-2.0 + license_family: APACHE + purls: + - pkg:pypi/cython?source=hash-mapping + size: 3184293 + timestamp: 1767577052444 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/cython-3.2.4-py310h9a762d2_0.conda + sha256: e2f547572b22597c51ad28ae2aab5f3fc5baabbff95bf30675ffd9ca7321671f + md5: 7c121ba38aa9ae356cfdc3407426e232 + depends: + - __osx >=11.0 + - libcxx >=19 + - python >=3.10,<3.11.0a0 + - python >=3.10,<3.11.0a0 *_cpython + - python_abi 3.10.* *_cp310 + license: Apache-2.0 + license_family: APACHE + purls: + - pkg:pypi/cython?source=hash-mapping + size: 2922909 + timestamp: 1767577303294 +- conda: https://conda.anaconda.org/conda-forge/linux-64/cytoolz-1.1.0-py310h7c4b9e2_2.conda + sha256: d523f8b090ef1ca44f72f50a8c76a2df225655bff99b470146029af722eab2fe + md5: 83d52a0c1500d0f8120966b678bcab80 + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=14 + - python >=3.10,<3.11.0a0 + - python_abi 3.10.* *_cp310 + - toolz >=0.10.0 + license: BSD-3-Clause + license_family: BSD + purls: + - pkg:pypi/cytoolz?source=hash-mapping + size: 598894 + timestamp: 1771855874334 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/cytoolz-1.1.0-py310h72544b6_2.conda + sha256: 1c1a03686e9cb478ee3530f6d76954438dac7cea578f021fbeb4be41e0b21a37 + md5: 8e7580a961e68d4a8c4173a88346f93d + depends: + - __osx >=11.0 + - python >=3.10,<3.11.0a0 + - python >=3.10,<3.11.0a0 *_cpython + - python_abi 3.10.* *_cp310 + - toolz >=0.10.0 + license: BSD-3-Clause + 
license_family: BSD + purls: + - pkg:pypi/cytoolz?source=hash-mapping + size: 560250 + timestamp: 1771856766465 +- conda: https://conda.anaconda.org/conda-forge/noarch/dask-2026.3.0-pyhc364b38_0.conda + sha256: fe59c26dc20a47aa56fc38a2326f2a62403f3eed366837c1a0fba166a220d2b7 + md5: f9761ef056ba0ccef16e01cfceee62c2 + depends: + - python >=3.10 + - dask-core >=2026.3.0,<2026.3.1.0a0 + - distributed >=2026.3.0,<2026.3.1.0a0 + - cytoolz >=0.11.2 + - lz4 >=4.3.2 + - numpy >=1.26 + - pandas >=2.0 + - bokeh >=3.1.0 + - jinja2 >=2.10.3 + - pyarrow >=16.0 + - python + constrains: + - openssl !=1.1.1e + license: BSD-3-Clause + license_family: BSD + purls: [] + size: 11383 + timestamp: 1773913283482 +- conda: https://conda.anaconda.org/conda-forge/noarch/dask-core-2026.3.0-pyhc364b38_0.conda + sha256: 5497e56b12b8a07921668f6469d725be9826ffe5ae8a2f6f71d26369400b41ca + md5: 809f4cde7c853f437becc43415a2ecdf + depends: + - python >=3.10 + - click >=8.1 + - cloudpickle >=3.0.0 + - fsspec >=2021.9.0 + - packaging >=20.0 + - partd >=1.4.0 + - pyyaml >=5.3.1 + - toolz >=0.12.0 + - importlib-metadata >=4.13.0 + - python + license: BSD-3-Clause + license_family: BSD + purls: + - pkg:pypi/dask?source=hash-mapping + size: 1066502 + timestamp: 1773823162829 +- conda: https://conda.anaconda.org/conda-forge/noarch/databroker-2.0.0-pyhd8ed1ab_0.conda + sha256: 82d22ca12001cfbe18d1643f5fb6000a93c63e057249ba7f2620d5835ea620db + md5: ae6ace6405d2f03bb9c2b3f144732c08 + depends: + - area-detector-handlers + - bluesky-tiled-plugins + - boltons + - cachetools + - doct + - entrypoints + - event-model + - fastapi + - glue-core + - humanize + - jinja2 + - jsonschema + - mongomock + - mongoquery + - msgpack-python >=1.0.0 + - orjson + - pims + - pydantic + - pymongo + - python >=3.10 + - pytz + - requests + - starlette + - suitcase-mongo >=0.6.0 + - tiled >=0.1.0b39 + - tiled-client + - tiled-server + - toolz + - tzlocal + license: BSD-3-Clause + license_family: BSD + purls: + - 
pkg:pypi/databroker?source=hash-mapping + size: 141191 + timestamp: 1763687269409 +- conda: https://conda.anaconda.org/conda-forge/noarch/databroker-2.0.0b38-pyhd8ed1ab_0.conda + sha256: c025d4a9dfbcd5294e19fe01915aca2324f89c2518411aafd23a020017269e8c + md5: 5e12b249844c84e54210b984213d0883 + depends: + - area-detector-handlers + - boltons + - cachetools + - doct + - entrypoints + - event-model + - fastapi + - humanize + - jinja2 + - jsonschema + - mongomock + - mongoquery + - msgpack-python >=1.0.0 + - orjson + - pims + - pydantic + - pymongo + - python >=3.6 + - pytz + - starlette + - suitcase-mongo >=0.5.0 + - tiled >=0.1.0a116 + - tiled-client + - tiled-server + - toolz + - tzlocal + license: BSD-3-Clause + license_family: BSD + purls: + - pkg:pypi/databroker?source=hash-mapping + size: 141589 + timestamp: 1709517520870 +- conda: https://conda.anaconda.org/conda-forge/linux-64/dav1d-1.2.1-hd590300_0.conda + sha256: 22053a5842ca8ee1cf8e1a817138cdb5e647eb2c46979f84153f6ad7bde73020 + md5: 418c6ca5929a611cbd69204907a83995 + depends: + - libgcc-ng >=12 + license: BSD-2-Clause + license_family: BSD + purls: [] + size: 760229 + timestamp: 1685695754230 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/dav1d-1.2.1-hb547adb_0.conda + sha256: 93e077b880a85baec8227e8c72199220c7f87849ad32d02c14fb3807368260b8 + md5: 5a74cdee497e6b65173e10d94582fae6 + license: BSD-2-Clause + license_family: BSD + purls: [] + size: 316394 + timestamp: 1685695959391 +- conda: https://conda.anaconda.org/conda-forge/linux-64/dbus-1.16.2-h24cb091_1.conda + sha256: 8bb557af1b2b7983cf56292336a1a1853f26555d9c6cecf1e5b2b96838c9da87 + md5: ce96f2f470d39bd96ce03945af92e280 + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=14 + - libstdcxx >=14 + - libzlib >=1.3.1,<2.0a0 + - libglib >=2.86.2,<3.0a0 + - libexpat >=2.7.3,<3.0a0 + license: AFL-2.1 OR GPL-2.0-or-later + purls: [] + size: 447649 + timestamp: 1764536047944 +- conda: 
https://conda.anaconda.org/conda-forge/noarch/decorator-5.2.1-pyhd8ed1ab_0.conda + sha256: c17c6b9937c08ad63cb20a26f403a3234088e57d4455600974a0ce865cb14017 + md5: 9ce473d1d1be1cc3810856a48b3fab32 + depends: + - python >=3.9 + license: BSD-2-Clause + license_family: BSD + purls: + - pkg:pypi/decorator?source=hash-mapping + size: 14129 + timestamp: 1740385067843 +- conda: https://conda.anaconda.org/conda-forge/noarch/dill-0.4.1-pyhcf101f3_0.conda + sha256: 1ef84c0cc4efd0c2d58c3cb365945edbd9ee42a1c54514d1ccba4b641005f757 + md5: 080a808fce955026bf82107d955d32da + depends: + - python >=3.10 + - python + license: BSD-3-Clause + license_family: BSD + purls: + - pkg:pypi/dill?source=hash-mapping + size: 95462 + timestamp: 1768863743943 +- conda: https://conda.anaconda.org/conda-forge/noarch/distlib-0.4.0-pyhd8ed1ab_0.conda + sha256: 6d977f0b2fc24fee21a9554389ab83070db341af6d6f09285360b2e09ef8b26e + md5: 003b8ba0a94e2f1e117d0bd46aebc901 + depends: + - python >=3.9 + license: Apache-2.0 + license_family: APACHE + purls: + - pkg:pypi/distlib?source=hash-mapping + size: 275642 + timestamp: 1752823081585 +- conda: https://conda.anaconda.org/conda-forge/noarch/distributed-2026.3.0-pyhc364b38_0.conda + sha256: 49cbb318f7a1797b9f17c135c9b5c48ba2086570a58c99054d3b40bf13a5b815 + md5: 8efb90a27e3b948514a428cb99f3fc70 + depends: + - python >=3.10 + - click >=8.0 + - cloudpickle >=3.0.0 + - cytoolz >=0.12.0 + - dask-core >=2026.3.0,<2026.3.1.0a0 + - jinja2 >=2.10.3 + - locket >=1.0.0 + - msgpack-python >=1.0.2 + - packaging >=20.0 + - psutil >=5.8.0 + - pyyaml >=5.4.1 + - sortedcontainers >=2.0.5 + - tblib >=1.6.0 + - toolz >=0.12.0 + - tornado >=6.2.0 + - urllib3 >=1.26.5 + - zict >=3.0.0 + - python + constrains: + - openssl !=1.1.1e + license: BSD-3-Clause + license_family: BSD + purls: + - pkg:pypi/distributed?source=hash-mapping + size: 845608 + timestamp: 1773858577032 +- conda: https://conda.anaconda.org/conda-forge/noarch/dnspython-2.8.0-pyhcf101f3_0.conda + sha256: 
ef1e7b8405997ed3d6e2b6722bd7088d4a8adf215e7c88335582e65651fb4e05 + md5: d73fdc05f10693b518f52c994d748c19 + depends: + - python >=3.10,<4.0.0 + - sniffio + - python + constrains: + - aioquic >=1.2.0 + - cryptography >=45 + - httpcore >=1.0.0 + - httpx >=0.28.0 + - h2 >=4.2.0 + - idna >=3.10 + - trio >=0.30 + - wmi >=1.5.1 + license: ISC + purls: + - pkg:pypi/dnspython?source=hash-mapping + size: 196500 + timestamp: 1757292856922 +- conda: https://conda.anaconda.org/conda-forge/noarch/doct-1.1.0-pyhd8ed1ab_1.conda + sha256: dd9bb5fa0f71bf2abd6cb95cda8bb0d4c85c60d98e798a069e3a88b11fc9530f + md5: 0308aef1583bf38f319746d070802538 + depends: + - humanize + - prettytable + - python >=3.9 + - six + license: BSD 3-Clause + purls: + - pkg:pypi/doct?source=hash-mapping + size: 9642 + timestamp: 1734372607001 +- conda: https://conda.anaconda.org/conda-forge/noarch/docutils-0.21.2-pyhd8ed1ab_1.conda + sha256: fa5966bb1718bbf6967a85075e30e4547901410cc7cb7b16daf68942e9a94823 + md5: 24c1ca34138ee57de72a943237cde4cc + depends: + - python >=3.9 + license: CC-PDDC AND BSD-3-Clause AND BSD-2-Clause AND ZPL-2.1 + purls: + - pkg:pypi/docutils?source=hash-mapping + size: 402700 + timestamp: 1733217860944 +- conda: https://conda.anaconda.org/conda-forge/linux-64/double-conversion-3.3.1-h5888daf_0.conda + sha256: 1bcc132fbcc13f9ad69da7aa87f60ea41de7ed4d09f3a00ff6e0e70e1c690bc2 + md5: bfd56492d8346d669010eccafe0ba058 + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=13 + - libstdcxx >=13 + license: BSD-3-Clause + license_family: BSD + purls: [] + size: 69544 + timestamp: 1739569648873 +- conda: https://conda.anaconda.org/conda-forge/noarch/ecdsa-0.19.2-pyhd8ed1ab_0.conda + sha256: 279bba0bcb2248ec21807fcb1459b52abff42154811b85b4a0c62c54aba6773f + md5: d3422625946166c45d353f5b96fc02da + depends: + - gmpy2 + - python >=3.10 + - six >=1.9.0 + license: MIT + license_family: MIT + purls: + - pkg:pypi/ecdsa?source=hash-mapping + size: 129113 + timestamp: 1774556826565 +- conda: 
https://conda.anaconda.org/conda-forge/noarch/echo-0.15.0-pyhd8ed1ab_0.conda + sha256: 1bc8ba15b0ba9d7d02ff2a2cd309a4eb0680360d81beb119bb569744588f67d2 + md5: d3445ebf229eefc9abfcf58e0f4f8be2 + depends: + - importlib_metadata + - numpy + - python >=3.10 + license: MIT + license_family: MIT + purls: + - pkg:pypi/echo?source=hash-mapping + size: 49073 + timestamp: 1775806041517 +- pypi: git+https://github.com/NSLS-II-CHX/eiger-io?rev=master#cb13bdc336e445697e6483556116aaba0368a5d3 + name: eiger-io + version: 0+untagged.47.gcb13bdc +- conda: https://conda.anaconda.org/conda-forge/noarch/email-validator-2.3.0-pyhd8ed1ab_0.conda + sha256: c37320864c35ef996b0e02e289df6ee89582d6c8e233e18dc9983375803c46bb + md5: 3bc0ac31178387e8ed34094d9481bfe8 + depends: + - dnspython >=2.0.0 + - idna >=2.0.0 + - python >=3.10 + license: Unlicense + purls: + - pkg:pypi/email-validator?source=hash-mapping + size: 46767 + timestamp: 1756221480106 +- conda: https://conda.anaconda.org/conda-forge/noarch/email_validator-2.3.0-hd8ed1ab_0.conda + sha256: 6a518e00d040fcad016fb2dde29672aa3476cd9ae33ea5b7b257222e66037d89 + md5: 2452e434747a6b742adc5045f2182a8e + depends: + - email-validator >=2.3.0,<2.3.1.0a0 + license: Unlicense + purls: [] + size: 7077 + timestamp: 1756221480651 +- conda: https://conda.anaconda.org/conda-forge/noarch/entrypoints-0.4-pyhd8ed1ab_1.conda + sha256: 80f579bfc71b3dab5bef74114b89e26c85cb0df8caf4c27ab5ffc16363d57ee7 + md5: 3366592d3c219f2731721f11bc93755c + depends: + - python >=3.9 + license: MIT + license_family: MIT + purls: + - pkg:pypi/entrypoints?source=hash-mapping + size: 11259 + timestamp: 1733327239578 +- conda: https://conda.anaconda.org/conda-forge/noarch/et_xmlfile-2.0.0-pyhd8ed1ab_1.conda + sha256: 2209534fbf2f70c20661ff31f57ab6a97b82ee98812e8a2dcb2b36a0d345727c + md5: 71bf9646cbfabf3022c8da4b6b4da737 + depends: + - python >=3.9 + license: MIT + license_family: MIT + purls: + - pkg:pypi/et-xmlfile?source=hash-mapping + size: 21908 + timestamp: 1733749746332 
+- conda: https://conda.anaconda.org/conda-forge/noarch/event-model-1.23.1-pyhd8ed1ab_1.conda + sha256: 008762de173833ec48a05f44097d58c6e37a99b3c29deb94fda52388bd84ac40 + md5: 93aaa9995c9d76d8717a4daf9f77b64a + depends: + - importlib-resources + - jsonschema >=3 + - numpy + - python >=3.10 + - typing_extensions + license: BSD-3-Clause + license_family: BSD + purls: + - pkg:pypi/event-model?source=hash-mapping + size: 58043 + timestamp: 1762637675721 +- conda: https://conda.anaconda.org/conda-forge/noarch/exceptiongroup-1.3.1-pyhd8ed1ab_0.conda + sha256: ee6cf346d017d954255bbcbdb424cddea4d14e4ed7e9813e429db1d795d01144 + md5: 8e662bd460bda79b1ea39194e3c4c9ab + depends: + - python >=3.10 + - typing_extensions >=4.6.0 + license: MIT and PSF-2.0 + purls: + - pkg:pypi/exceptiongroup?source=compressed-mapping + size: 21333 + timestamp: 1763918099466 +- conda: https://conda.anaconda.org/conda-forge/noarch/executing-2.2.1-pyhd8ed1ab_0.conda + sha256: 210c8165a58fdbf16e626aac93cc4c14dbd551a01d1516be5ecad795d2422cad + md5: ff9efb7f7469aed3c4a8106ffa29593c + depends: + - python >=3.10 + license: MIT + license_family: MIT + purls: + - pkg:pypi/executing?source=hash-mapping + size: 30753 + timestamp: 1756729456476 +- pypi: https://files.pythonhosted.org/packages/c0/d1/5ddb92494517fa5453c46872a87fd0d0e4a497543b353ce0aad6466f406a/fabio-2025.10.0-cp310-cp310-macosx_11_0_arm64.whl + name: fabio + version: 2025.10.0 + sha256: d92c8fe8f04c6b0adab095b11b27034896a5a2e13cd15ba0937115477f1aebde + requires_dist: + - numpy + - h5py + - hdf5plugin + - lxml + - pillow + - pyside6 ; extra == 'gui' + - matplotlib ; extra == 'gui' + - packaging ; extra == 'gui' + - pyside6 ; extra == 'all' + - matplotlib ; extra == 'all' + - packaging ; extra == 'all' + requires_python: '>=3.10' +- pypi: https://files.pythonhosted.org/packages/fd/15/a70400eeea394dbcf34f926713499c3f7334b2ee0ec3639b794491ffc9c0/fabio-2025.10.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl + name: 
fabio + version: 2025.10.0 + sha256: 5d5b324eb7eb427350cf6a639071b04858fdbedb6e5a61a8d69ef9aea780abbf + requires_dist: + - numpy + - h5py + - hdf5plugin + - lxml + - pillow + - pyside6 ; extra == 'gui' + - matplotlib ; extra == 'gui' + - packaging ; extra == 'gui' + - pyside6 ; extra == 'all' + - matplotlib ; extra == 'all' + - packaging ; extra == 'all' + requires_python: '>=3.10' +- conda: https://conda.anaconda.org/conda-forge/linux-64/fast-histogram-0.14-py310hf779ad0_4.conda + sha256: 5c9cc91cc4737645323da4895585c73936297175210e3f968a50905724165ad9 + md5: 85d139c6c12394e0c4ea78794be4e5d7 + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=14 + - numpy >=1.21,<3 + - python >=3.10,<3.11.0a0 + - python_abi 3.10.* *_cp310 + license: BSD-2-Clause + license_family: BSD + purls: + - pkg:pypi/fast-histogram?source=hash-mapping + size: 34027 + timestamp: 1761124385264 +- conda: https://conda.anaconda.org/conda-forge/noarch/fastapi-0.135.3-hbd727af_0.conda + sha256: 076958a26a7429f0f21324f17d93f005e95e1927d2c0742a84426aa138236af4 + md5: b19a746ffc6b4fd78a571a3229aa7997 + depends: + - fastapi-core ==0.135.3 pyhcf101f3_0 + - email_validator + - fastapi-cli + - httpx + - jinja2 + - pydantic-settings + - pydantic-extra-types + - python-multipart + - uvicorn-standard + license: MIT + license_family: MIT + purls: + - pkg:pypi/fastapi?source=compressed-mapping + size: 4813 + timestamp: 1775065681410 +- conda: https://conda.anaconda.org/conda-forge/noarch/fastapi-cli-0.0.23-pyhcf101f3_0.conda + sha256: cb60fc8c96dcd2a6335914d4d6d7d5f5549c9e1ff4533be28ba699e648babf37 + md5: 442ec6511754418c87a84bc1dc0c5384 + depends: + - python >=3.10 + - rich-toolkit >=0.14.8 + - tomli >=2.0.0 + - typer >=0.15.1 + - uvicorn-standard >=0.15.0 + - python + license: MIT + license_family: MIT + purls: + - pkg:pypi/fastapi-cli?source=compressed-mapping + size: 18920 + timestamp: 1771293215825 +- conda: https://conda.anaconda.org/conda-forge/noarch/fastapi-core-0.135.3-pyhcf101f3_0.conda + sha256: 
a3d8d44e7b6519eac6b57faaf1c872f8865b125cf6ecb50d2d09467c558294c5 + md5: 1ed2eb0553d0b601604e20af3db2d9f9 + depends: + - python >=3.10 + - annotated-doc >=0.0.2 + - starlette >=0.46.0 + - typing_extensions >=4.8.0 + - typing-inspection >=0.4.2 + - pydantic >=2.9.0 + - python + constrains: + - email_validator >=2.0.0 + - fastapi-cli >=0.0.8 + - httpx >=0.23.0,<1.0.0 + - jinja2 >=3.1.5 + - pydantic-extra-types >=2.0.0 + - pydantic-settings >=2.0.0 + - python-multipart >=0.0.18 + - uvicorn-standard >=0.12.0 + license: MIT + license_family: MIT + purls: + - pkg:pypi/fastapi?source=compressed-mapping + size: 95900 + timestamp: 1775065681410 +- conda: https://conda.anaconda.org/conda-forge/noarch/fasteners-0.19-pyhd8ed1ab_1.conda + sha256: 42fb170778b47303e82eddfea9a6d1e1b8af00c927cd5a34595eaa882b903a16 + md5: dbe9d42e94b5ff7af7b7893f4ce052e7 + depends: + - python >=3.9 + license: Apache-2.0 + license_family: APACHE + purls: + - pkg:pypi/fasteners?source=hash-mapping + size: 20711 + timestamp: 1734943237791 +- conda: https://conda.anaconda.org/conda-forge/noarch/filelock-3.25.2-pyhd8ed1ab_0.conda + sha256: dddea9ec53d5e179de82c24569d41198f98db93314f0adae6b15195085d5567f + md5: f58064cec97b12a7136ebb8a6f8a129b + depends: + - python >=3.10 + license: Unlicense + purls: + - pkg:pypi/filelock?source=compressed-mapping + size: 25845 + timestamp: 1773314012590 +- conda: https://conda.anaconda.org/conda-forge/noarch/flake8-7.3.0-pyhd8ed1ab_0.conda + sha256: a32e511ea71a9667666935fd9f497f00bcc6ed0099ef04b9416ac24606854d58 + md5: 04a55140685296b25b79ad942264c0ef + depends: + - mccabe >=0.7.0,<0.8.0 + - pycodestyle >=2.14.0,<2.15.0 + - pyflakes >=3.4.0,<3.5.0 + - python >=3.9 + license: MIT + license_family: MIT + purls: + - pkg:pypi/flake8?source=hash-mapping + size: 111916 + timestamp: 1750968083921 +- conda: https://conda.anaconda.org/conda-forge/noarch/font-ttf-dejavu-sans-mono-2.37-hab24e00_0.tar.bz2 + sha256: 58d7f40d2940dd0a8aa28651239adbf5613254df0f75789919c4e6762054403b + 
md5: 0c96522c6bdaed4b1566d11387caaf45 + license: BSD-3-Clause + license_family: BSD + purls: [] + size: 397370 + timestamp: 1566932522327 +- conda: https://conda.anaconda.org/conda-forge/noarch/font-ttf-inconsolata-3.000-h77eed37_0.tar.bz2 + sha256: c52a29fdac682c20d252facc50f01e7c2e7ceac52aa9817aaf0bb83f7559ec5c + md5: 34893075a5c9e55cdafac56607368fc6 + license: OFL-1.1 + license_family: Other + purls: [] + size: 96530 + timestamp: 1620479909603 +- conda: https://conda.anaconda.org/conda-forge/noarch/font-ttf-source-code-pro-2.038-h77eed37_0.tar.bz2 + sha256: 00925c8c055a2275614b4d983e1df637245e19058d79fc7dd1a93b8d9fb4b139 + md5: 4d59c254e01d9cde7957100457e2d5fb + license: OFL-1.1 + license_family: Other + purls: [] + size: 700814 + timestamp: 1620479612257 +- conda: https://conda.anaconda.org/conda-forge/noarch/font-ttf-ubuntu-0.83-h77eed37_3.conda + sha256: 2821ec1dc454bd8b9a31d0ed22a7ce22422c0aef163c59f49dfdf915d0f0ca14 + md5: 49023d73832ef61042f6a237cb2687e7 + license: LicenseRef-Ubuntu-Font-Licence-Version-1.0 + license_family: Other + purls: [] + size: 1620504 + timestamp: 1727511233259 +- conda: https://conda.anaconda.org/conda-forge/linux-64/fontconfig-2.17.1-h27c8c51_0.conda + sha256: aa4a44dba97151221100a637c7f4bde619567afade9c0265f8e1c8eed8d7bd8c + md5: 867127763fbe935bab59815b6e0b7b5c + depends: + - __glibc >=2.17,<3.0.a0 + - libexpat >=2.7.4,<3.0a0 + - libfreetype >=2.14.1 + - libfreetype6 >=2.14.1 + - libgcc >=14 + - libuuid >=2.41.3,<3.0a0 + - libzlib >=1.3.1,<2.0a0 + license: MIT + license_family: MIT + purls: [] + size: 270705 + timestamp: 1771382710863 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/fontconfig-2.17.1-h2b252f5_0.conda + sha256: 851e9c778bfc54645dcab7038c0383445cbebf16f6bb2d3f62ce422b1605385a + md5: d06ae1a11b46cc4c74177ecd28de7c7a + depends: + - __osx >=11.0 + - libexpat >=2.7.4,<3.0a0 + - libfreetype >=2.14.1 + - libfreetype6 >=2.14.1 + - libzlib >=1.3.1,<2.0a0 + license: MIT + license_family: MIT + purls: [] + size: 
237308 + timestamp: 1771382999247 +- conda: https://conda.anaconda.org/conda-forge/noarch/fonts-conda-ecosystem-1-0.tar.bz2 + sha256: a997f2f1921bb9c9d76e6fa2f6b408b7fa549edd349a77639c9fe7a23ea93e61 + md5: fee5683a3f04bd15cbd8318b096a27ab + depends: + - fonts-conda-forge + license: BSD-3-Clause + license_family: BSD + purls: [] + size: 3667 + timestamp: 1566974674465 +- conda: https://conda.anaconda.org/conda-forge/noarch/fonts-conda-forge-1-hc364b38_1.conda + sha256: 54eea8469786bc2291cc40bca5f46438d3e062a399e8f53f013b6a9f50e98333 + md5: a7970cd949a077b7cb9696379d338681 + depends: + - font-ttf-ubuntu + - font-ttf-inconsolata + - font-ttf-dejavu-sans-mono + - font-ttf-source-code-pro + license: BSD-3-Clause + license_family: BSD + purls: [] + size: 4059 + timestamp: 1762351264405 +- conda: https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.62.0-py310h3406613_0.conda + sha256: 19bd6f1137310e8a23d6b341d325d24aaf89a5234b831fbb26759b7b0168a2c1 + md5: 5ec9c93a61d857c8bdefdcebd49a5ef8 + depends: + - __glibc >=2.17,<3.0.a0 + - brotli + - libgcc >=14 + - munkres + - python >=3.10,<3.11.0a0 + - python_abi 3.10.* *_cp310 + - unicodedata2 >=15.1.0 + license: MIT + license_family: MIT + purls: + - pkg:pypi/fonttools?source=hash-mapping + size: 2451656 + timestamp: 1773137442143 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/fonttools-4.62.0-py310hb46c203_0.conda + sha256: ace28679c59a7292bca5f886c8d5a4a82570fd62e49147851b0fa9ae013bf531 + md5: c9f417e61f1ac1635b1bf7bdbf5bbd2b + depends: + - __osx >=11.0 + - brotli + - munkres + - python >=3.10,<3.11.0a0 + - python >=3.10,<3.11.0a0 *_cpython + - python_abi 3.10.* *_cp310 + - unicodedata2 >=15.1.0 + license: MIT + license_family: MIT + purls: + - pkg:pypi/fonttools?source=hash-mapping + size: 2399134 + timestamp: 1773156735595 +- conda: https://conda.anaconda.org/conda-forge/linux-64/freetype-2.14.3-ha770c72_0.conda + sha256: c934c385889c7836f034039b43b05ccfa98f53c900db03d8411189892ced090b + md5: 
8462b5322567212beeb025f3519fb3e2 + depends: + - libfreetype 2.14.3 ha770c72_0 + - libfreetype6 2.14.3 h73754d4_0 + license: GPL-2.0-only OR FTL + purls: [] + size: 173839 + timestamp: 1774298173462 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/freetype-2.14.3-hce30654_0.conda + sha256: 5952bd9db12207a18a112e8924aa2ce8c2f9d57b62584d58a97d2f6afe1ea324 + md5: 6dcc75ba2e04c555e881b72793d3282f + depends: + - libfreetype 2.14.3 hce30654_0 + - libfreetype6 2.14.3 hdfa99f5_0 + license: GPL-2.0-only OR FTL + purls: [] + size: 173313 + timestamp: 1774298702053 +- conda: https://conda.anaconda.org/conda-forge/noarch/freetype-py-2.3.0-pyhd8ed1ab_0.tar.bz2 + sha256: bfa5ddb943992643a2ee2e258507beac2bdb17825e74d05f1bb0b79bb2c924fc + md5: e4a165cdbbaed5bbb6e653b823156151 + depends: + - freetype + - python >=3.7 + license: BSD-3-Clause + license_family: BSD + purls: + - pkg:pypi/freetype-py?source=hash-mapping + size: 58932 + timestamp: 1650983451848 +- conda: https://conda.anaconda.org/conda-forge/noarch/fsspec-2026.3.0-pyhd8ed1ab_0.conda + sha256: b4a7aec32167502dd4a2d1fb1208c63760828d7111339aa5b305b2d776afa70f + md5: c18d2ba7577cdc618a20d45f1e31d14b + depends: + - python >=3.10 + license: BSD-3-Clause + license_family: BSD + purls: + - pkg:pypi/fsspec?source=hash-mapping + size: 148973 + timestamp: 1774699581537 +- conda: https://conda.anaconda.org/conda-forge/noarch/future-1.0.0-pyhd8ed1ab_2.conda + sha256: 45dfd037889b7075c5eb46394f93172de0be0b1624c7f802dd3ecc94b814d8e0 + md5: 1054c53c95d85e35b88143a3eda66373 + depends: + - python >=3.9 + license: MIT + license_family: MIT + purls: + - pkg:pypi/future?source=hash-mapping + size: 364561 + timestamp: 1738926525117 +- conda: https://conda.anaconda.org/conda-forge/linux-64/geos-3.14.1-h480dda7_0.conda + sha256: 08896dcd94e14a83f247e91748444e610f344ab42d80cbf2b6082b481c3f8f4b + md5: 4d4efd0645cd556fab54617c4ad477ef + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=14 + - libstdcxx >=14 + license: LGPL-2.1-only + 
purls: [] + size: 1974942 + timestamp: 1761593471198 +- conda: https://conda.anaconda.org/conda-forge/linux-64/gflags-2.2.2-h5888daf_1005.conda + sha256: 6c33bf0c4d8f418546ba9c250db4e4221040936aef8956353bc764d4877bc39a + md5: d411fc29e338efb48c5fd4576d71d881 + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=13 + - libstdcxx >=13 + license: BSD-3-Clause + license_family: BSD + purls: [] + size: 119654 + timestamp: 1726600001928 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/gflags-2.2.2-hf9b8971_1005.conda + sha256: fd56ed8a1dab72ab90d8a8929b6f916a6d9220ca297ff077f8f04c5ed3408e20 + md5: 57a511a5905caa37540eb914dfcbf1fb + depends: + - __osx >=11.0 + - libcxx >=17 + license: BSD-3-Clause + license_family: BSD + purls: [] + size: 82090 + timestamp: 1726600145480 +- conda: https://conda.anaconda.org/conda-forge/linux-64/giflib-5.2.2-hd590300_0.conda + sha256: aac402a8298f0c0cc528664249170372ef6b37ac39fdc92b40601a6aed1e32ff + md5: 3bf7b9fd5a7136126e0234db4b87c8b6 + depends: + - libgcc-ng >=12 + license: MIT + license_family: MIT + purls: [] + size: 77248 + timestamp: 1712692454246 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/giflib-5.2.2-h93a5062_0.conda + sha256: 843b3f364ff844137e37d5c0a181f11f6d51adcedd216f019d074e5aa5d7e09c + md5: 95fa1486c77505330c20f7202492b913 + license: MIT + license_family: MIT + purls: [] + size: 71613 + timestamp: 1712692611426 +- conda: https://conda.anaconda.org/conda-forge/linux-64/glog-0.7.1-hbabe93e_0.conda + sha256: dc824dc1d0aa358e28da2ecbbb9f03d932d976c8dca11214aa1dcdfcbd054ba2 + md5: ff862eebdfeb2fd048ae9dc92510baca + depends: + - gflags >=2.2.2,<2.3.0a0 + - libgcc-ng >=12 + - libstdcxx-ng >=12 + license: BSD-3-Clause + license_family: BSD + purls: [] + size: 143452 + timestamp: 1718284177264 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/glog-0.7.1-heb240a5_0.conda + sha256: 9fc77de416953aa959039db72bc41bfa4600ae3ff84acad04a7d0c1ab9552602 + md5: fef68d0a95aa5b84b5c1a4f6f3bf40e1 + depends: + - 
__osx >=11.0 + - gflags >=2.2.2,<2.3.0a0 + - libcxx >=16 + license: BSD-3-Clause + license_family: BSD + purls: [] + size: 112215 + timestamp: 1718284365403 +- conda: https://conda.anaconda.org/conda-forge/noarch/glue-core-1.25.0-pyhd8ed1ab_0.conda + sha256: d5f578acce521e35754b3c8d2b71fe798ce5e0fe1b548e7a4c713e49b4d8dab2 + md5: d3cc1cd4ce8cdeced596cccecb661956 + depends: + - astropy-base >=4.0 + - dill >=0.2 + - echo >=0.12 + - fast-histogram >=0.12 + - h5py >=2.10 + - importlib-metadata >=3.6 + - importlib-resources >=1.3 + - ipython >=4.0 + - matplotlib-base >=3.2 + - mpl-scatter-density >=0.8 + - numpy >=1.17 + - openpyxl >=3.0 + - pandas >=1.2 + - python >=3.10 + - scikit-image + - scipy >=1.1 + - shapely >=2.0 + - xlrd >=1.2 + license: BSD-3-Clause + license_family: BSD + purls: + - pkg:pypi/glue-core?source=hash-mapping + size: 843687 + timestamp: 1770884091729 +- conda: https://conda.anaconda.org/conda-forge/linux-64/gmp-6.3.0-hac33072_2.conda + sha256: 309cf4f04fec0c31b6771a5809a1909b4b3154a2208f52351e1ada006f4c750c + md5: c94a5994ef49749880a8139cf9afcbe1 + depends: + - libgcc-ng >=12 + - libstdcxx-ng >=12 + license: GPL-2.0-or-later OR LGPL-3.0-or-later + purls: [] + size: 460055 + timestamp: 1718980856608 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/gmp-6.3.0-h7bae524_2.conda + sha256: 76e222e072d61c840f64a44e0580c2503562b009090f55aa45053bf1ccb385dd + md5: eed7278dfbab727b56f2c0b64330814b + depends: + - __osx >=11.0 + - libcxx >=16 + license: GPL-2.0-or-later OR LGPL-3.0-or-later + purls: [] + size: 365188 + timestamp: 1718981343258 +- conda: https://conda.anaconda.org/conda-forge/linux-64/gmpy2-2.3.0-py310h63ebcad_1.conda + sha256: 47859547d394a2aaf8d0e749315a2f2e162a0d39e9c45550ced18fad7a60ce06 + md5: 82dcd4602e6ad2e016b3f15894d56d84 + depends: + - __glibc >=2.17,<3.0.a0 + - gmp >=6.3.0,<7.0a0 + - libgcc >=14 + - mpc >=1.3.1,<2.0a0 + - mpfr >=4.2.1,<5.0a0 + - python >=3.10,<3.11.0a0 + - python_abi 3.10.* *_cp310 + license: 
LGPL-3.0-or-later + license_family: LGPL + purls: + - pkg:pypi/gmpy2?source=hash-mapping + size: 241115 + timestamp: 1773245095153 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/gmpy2-2.3.0-py310h6ac7f53_1.conda + sha256: 2da35876d9b85b8e93ea814a357a22e3d5cb9d26055057b957c6513e38ccb03c + md5: daec1fe605be6771bc579c1026aad61a + depends: + - __osx >=11.0 + - gmp >=6.3.0,<7.0a0 + - mpc >=1.3.1,<2.0a0 + - mpfr >=4.2.1,<5.0a0 + - python >=3.10,<3.11.0a0 + - python >=3.10,<3.11.0a0 *_cpython + - python_abi 3.10.* *_cp310 + license: LGPL-3.0-or-later + license_family: LGPL + purls: + - pkg:pypi/gmpy2?source=hash-mapping + size: 190872 + timestamp: 1773245375532 +- conda: https://conda.anaconda.org/conda-forge/linux-64/graphite2-1.3.14-hecca717_2.conda + sha256: 25ba37da5c39697a77fce2c9a15e48cf0a84f1464ad2aafbe53d8357a9f6cc8c + md5: 2cd94587f3a401ae05e03a6caf09539d + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=14 + - libstdcxx >=14 + license: LGPL-2.0-or-later + license_family: LGPL + purls: [] + size: 99596 + timestamp: 1755102025473 +- conda: https://conda.anaconda.org/conda-forge/linux-64/greenlet-3.4.0-py310h25320af_0.conda + sha256: 1e64dbd1888d923401c286031fb6e812743a35e28e3446a02e202d2320607ce2 + md5: dd6e04f2a37c92255b77b70f86c1f06c + depends: + - python + - __glibc >=2.17,<3.0.a0 + - libstdcxx >=14 + - libgcc >=14 + - python_abi 3.10.* *_cp310 + license: MIT + license_family: MIT + purls: + - pkg:pypi/greenlet?source=hash-mapping + size: 238511 + timestamp: 1775678833875 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/greenlet-3.4.0-py310h19b6747_0.conda + sha256: a2a96dca693fc641bc848cd3fbf3261a2f31d812c8c6b276722d45019a695320 + md5: 86178203c1a9b01c1d1cb8f853acb6f0 + depends: + - python + - __osx >=11.0 + - python 3.10.* *_cpython + - libcxx >=19 + - python_abi 3.10.* *_cp310 + license: MIT + license_family: MIT + purls: + - pkg:pypi/greenlet?source=hash-mapping + size: 234400 + timestamp: 1775679019165 +- conda: 
https://conda.anaconda.org/conda-forge/noarch/h11-0.16.0-pyhcf101f3_1.conda + sha256: 96cac6573fd35ae151f4d6979bab6fbc90cb6b1fb99054ba19eb075da9822fcb + md5: b8993c19b0c32a2f7b66cbb58ca27069 + depends: + - python >=3.10 + - typing_extensions + - python + license: MIT + license_family: MIT + purls: + - pkg:pypi/h11?source=compressed-mapping + size: 39069 + timestamp: 1767729720872 +- conda: https://conda.anaconda.org/conda-forge/noarch/h2-4.3.0-pyhcf101f3_0.conda + sha256: 84c64443368f84b600bfecc529a1194a3b14c3656ee2e832d15a20e0329b6da3 + md5: 164fc43f0b53b6e3a7bc7dce5e4f1dc9 + depends: + - python >=3.10 + - hyperframe >=6.1,<7 + - hpack >=4.1,<5 + - python + license: MIT + license_family: MIT + purls: + - pkg:pypi/h2?source=compressed-mapping + size: 95967 + timestamp: 1756364871835 +- conda: https://conda.anaconda.org/conda-forge/noarch/h5netcdf-1.8.1-pyhd8ed1ab_0.conda + sha256: 5bf081c0f21a57fc84b5000d53f043d63638b77dcc2137f87511a4581838c9f6 + md5: ca7f9ba8762d3e360e47917a10e23760 + depends: + - h5py + - numpy + - packaging + - python >=3.9 + license: BSD-3-Clause + license_family: BSD + purls: + - pkg:pypi/h5netcdf?source=hash-mapping + size: 57732 + timestamp: 1769241877548 +- conda: https://conda.anaconda.org/conda-forge/linux-64/h5py-3.16.0-nompi_py310h4aa865e_102.conda + sha256: e2f5169e2da674c83e678e7ec0ed6f969e5830a4bd23606ef610b2b18dde1090 + md5: e5f4aa8cdc28aa431bd54f4d8b2ed24f + depends: + - __glibc >=2.17,<3.0.a0 + - cached-property + - hdf5 >=1.14.6,<1.14.7.0a0 + - libgcc >=14 + - numpy >=1.21,<3 + - python >=3.10,<3.11.0a0 + - python_abi 3.10.* *_cp310 + license: BSD-3-Clause + license_family: BSD + purls: + - pkg:pypi/h5py?source=hash-mapping + size: 1252638 + timestamp: 1775581283560 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/h5py-3.16.0-nompi_py310h0c5f886_102.conda + sha256: 515c30d2ac4809703201b746a1656b5bcfbf6ecc45849caec4684d131acbc82e + md5: 54735b6dd3364328d2e1fa9d2601b8be + depends: + - __osx >=11.0 + - cached-property + - 
hdf5 >=1.14.6,<1.14.7.0a0 + - numpy >=1.21,<3 + - python >=3.10,<3.11.0a0 + - python >=3.10,<3.11.0a0 *_cpython + - python_abi 3.10.* *_cp310 + license: BSD-3-Clause + license_family: BSD + purls: + - pkg:pypi/h5py?source=hash-mapping + size: 1103506 + timestamp: 1775581597111 +- conda: https://conda.anaconda.org/conda-forge/linux-64/harfbuzz-12.2.0-h15599e2_0.conda + sha256: 6bd8b22beb7d40562b2889dc68232c589ff0d11a5ad3addd41a8570d11f039d9 + md5: b8690f53007e9b5ee2c2178dd4ac778c + depends: + - __glibc >=2.17,<3.0.a0 + - cairo >=1.18.4,<2.0a0 + - graphite2 >=1.3.14,<2.0a0 + - icu >=75.1,<76.0a0 + - libexpat >=2.7.1,<3.0a0 + - libfreetype >=2.14.1 + - libfreetype6 >=2.14.1 + - libgcc >=14 + - libglib >=2.86.1,<3.0a0 + - libstdcxx >=14 + - libzlib >=1.3.1,<2.0a0 + license: MIT + license_family: MIT + purls: [] + size: 2411408 + timestamp: 1762372726141 +- conda: https://conda.anaconda.org/conda-forge/linux-64/hdf5-1.14.6-nompi_h19486de_106.conda + sha256: 1fc50ce3b86710fba3ec9c5714f1612b5ffa4230d70bfe43e2a1436eacba1621 + md5: c223ee1429ba538f3e48cfb4a0b97357 + depends: + - __glibc >=2.17,<3.0.a0 + - libaec >=1.1.5,<2.0a0 + - libcurl >=8.18.0,<9.0a0 + - libgcc >=14 + - libgfortran + - libgfortran5 >=14.3.0 + - libstdcxx >=14 + - libzlib >=1.3.1,<2.0a0 + - openssl >=3.5.5,<4.0a0 + license: BSD-3-Clause + license_family: BSD + purls: [] + size: 3708864 + timestamp: 1770390337946 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/hdf5-1.14.6-nompi_had3affe_108.conda + sha256: 997c7c875d70873fbd931f44aa813f98e3195bdc80957b5bb24dacb859ad7b20 + md5: da1f9cc54397e702a1ace51e2800a066 + depends: + - __osx >=11.0 + - libaec >=1.1.5,<2.0a0 + - libcurl >=8.19.0,<9.0a0 + - libcxx >=19 + - libgfortran + - libgfortran5 >=14.3.0 + - libzlib >=1.3.2,<2.0a0 + - openssl >=3.5.5,<4.0a0 + license: BSD-3-Clause + license_family: BSD + purls: [] + size: 3292751 + timestamp: 1774407465085 +- pypi: 
https://files.pythonhosted.org/packages/9c/13/15017f6210bfea843316d62f0f121e364e17bb129444ed803a256a213036/hdf5plugin-6.0.0-py3-none-macosx_10_13_universal2.whl + name: hdf5plugin + version: 6.0.0 + sha256: a59fbd5d4290a8a5334d82ccb4c6b9bfc7aaf586de7fedb88762e8601bc05fd4 + requires_dist: + - h5py>=3.0.0 + - ipython ; extra == 'doc' + - nbsphinx ; extra == 'doc' + - sphinx ; extra == 'doc' + - sphinx-autodoc-typehints ; extra == 'doc' + - sphinx-rtd-theme ; extra == 'doc' + - numpy<2 ; python_full_version == '3.9.*' and extra == 'test' + - blosc2>=2.5.1 ; extra == 'test' + - blosc2-grok>=0.2.2 ; extra == 'test' + - hdf5plugin[doc,test] ; extra == 'dev' + - bandit ; extra == 'dev' + - black ; extra == 'dev' + - flake8 ; extra == 'dev' + - isort ; extra == 'dev' + requires_python: '>=3.9' +- conda: https://conda.anaconda.org/conda-forge/linux-64/hdf5plugin-5.1.0-py310h887a449_1.conda + sha256: afdb73c3de8f7fdcfc546dc3e9c32504593abc219e3eabf46145ed68f1984a52 + md5: 96ed0d688680c46848ea33fe2a481e8c + depends: + - __glibc >=2.17,<3.0.a0 + - h5py >=3.0.0 + - hdf5 >=1.14.6,<1.14.7.0a0 + - libgcc >=13 + - libstdcxx >=13 + - python >=3.10,<3.11.0a0 + - python_abi 3.10.* *_cp310 + license: MIT + license_family: MIT + purls: + - pkg:pypi/hdf5plugin?source=hash-mapping + size: 4363448 + timestamp: 1746451085077 +- conda: https://conda.anaconda.org/conda-forge/noarch/heapdict-1.0.1-pyhd8ed1ab_2.conda + sha256: 028d035e09e5119e2defee4e8387460b0e31429616aa0999392ec0ad20da6181 + md5: 9f203f36c466edeced192b7c5694c480 + depends: + - python >=3.9 + license: BSD-3-Clause + license_family: BSD + purls: + - pkg:pypi/heapdict?source=hash-mapping + size: 10169 + timestamp: 1733762704231 +- conda: https://conda.anaconda.org/conda-forge/noarch/historydict-1.2.6-pyhd8ed1ab_1.conda + sha256: 325656b615af237c60f7596b40d654b489af4d4d128aa936c3ef287a5bd6a8b9 + md5: 3f055e6e8646745be340ce4b08092328 + depends: + - python >=3.9 + license: BSD-3-Clause + license_family: BSD + purls: + - 
pkg:pypi/historydict?source=hash-mapping + size: 11249 + timestamp: 1734382711342 +- conda: https://conda.anaconda.org/conda-forge/noarch/hpack-4.1.0-pyhd8ed1ab_0.conda + sha256: 6ad78a180576c706aabeb5b4c8ceb97c0cb25f1e112d76495bff23e3779948ba + md5: 0a802cb9888dd14eeefc611f05c40b6e + depends: + - python >=3.9 + license: MIT + license_family: MIT + purls: + - pkg:pypi/hpack?source=hash-mapping + size: 30731 + timestamp: 1737618390337 +- conda: https://conda.anaconda.org/conda-forge/noarch/httpcore-1.0.9-pyh29332c3_0.conda + sha256: 04d49cb3c42714ce533a8553986e1642d0549a05dc5cc48e0d43ff5be6679a5b + md5: 4f14640d58e2cc0aa0819d9d8ba125bb + depends: + - python >=3.9 + - h11 >=0.16 + - h2 >=3,<5 + - sniffio 1.* + - anyio >=4.0,<5.0 + - certifi + - python + license: BSD-3-Clause + license_family: BSD + purls: + - pkg:pypi/httpcore?source=hash-mapping + size: 49483 + timestamp: 1745602916758 +- conda: https://conda.anaconda.org/conda-forge/linux-64/httptools-0.7.1-py310h7c4b9e2_1.conda + sha256: d5d75c05b388c72c0befa7a4f18bba1c3db7e422cd342698eab97e98cacbe033 + md5: d3d0e87704846f6702f795952fa84ac3 + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=14 + - python >=3.10,<3.11.0a0 + - python_abi 3.10.* *_cp310 + license: MIT + license_family: MIT + purls: + - pkg:pypi/httptools?source=hash-mapping + size: 97534 + timestamp: 1762504037538 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/httptools-0.7.1-py310hfe3a0ae_1.conda + sha256: e8d83d8608f4774e01f3e465d349f79dae653cafd1c57f94a96935a034032f85 + md5: e7a1bea0ce19c9a2cde284dc72ea2f1c + depends: + - __osx >=11.0 + - python >=3.10,<3.11.0a0 + - python >=3.10,<3.11.0a0 *_cpython + - python_abi 3.10.* *_cp310 + license: MIT + license_family: MIT + purls: + - pkg:pypi/httptools?source=hash-mapping + size: 88783 + timestamp: 1762504520995 +- conda: https://conda.anaconda.org/conda-forge/noarch/httpx-0.28.1-pyhd8ed1ab_0.conda + sha256: cd0f1de3697b252df95f98383e9edb1d00386bfdd03fdf607fa42fe5fcb09950 + md5: 
d6989ead454181f4f9bc987d3dc4e285 + depends: + - anyio + - certifi + - httpcore 1.* + - idna + - python >=3.9 + license: BSD-3-Clause + license_family: BSD + purls: + - pkg:pypi/httpx?source=hash-mapping + size: 63082 + timestamp: 1733663449209 +- conda: https://conda.anaconda.org/conda-forge/noarch/humanize-4.15.0-pyhd8ed1ab_0.conda + sha256: 6c4343b376d0b12a4c75ab992640970d36c933cad1fd924f6a1181fa91710e80 + md5: daddf757c3ecd6067b9af1df1f25d89e + depends: + - python >=3.10 + license: MIT + license_family: MIT + purls: + - pkg:pypi/humanize?source=hash-mapping + size: 67994 + timestamp: 1766267728652 +- conda: https://conda.anaconda.org/conda-forge/noarch/hyperframe-6.1.0-pyhd8ed1ab_0.conda + sha256: 77af6f5fe8b62ca07d09ac60127a30d9069fdc3c68d6b256754d0ffb1f7779f8 + md5: 8e6923fc12f1fe8f8c4e5c9f343256ac + depends: + - python >=3.9 + license: MIT + license_family: MIT + purls: + - pkg:pypi/hyperframe?source=hash-mapping + size: 17397 + timestamp: 1737618427549 +- conda: https://conda.anaconda.org/conda-forge/linux-64/icu-75.1-he02047a_0.conda + sha256: 71e750d509f5fa3421087ba88ef9a7b9be11c53174af3aa4d06aff4c18b38e8e + md5: 8b189310083baabfb622af68fd9d3ae3 + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc-ng >=12 + - libstdcxx-ng >=12 + license: MIT + license_family: MIT + purls: [] + size: 12129203 + timestamp: 1720853576813 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/icu-78.3-hef89b57_0.conda + sha256: 3a7907a17e9937d3a46dfd41cffaf815abad59a569440d1e25177c15fd0684e5 + md5: f1182c91c0de31a7abd40cedf6a5ebef + depends: + - __osx >=11.0 + license: MIT + license_family: MIT + purls: [] + size: 12361647 + timestamp: 1773822915649 +- conda: https://conda.anaconda.org/conda-forge/noarch/id-1.6.1-pyhcf101f3_0.conda + sha256: 54c80a4ca6e6a19b4bb89c829f757d0de00362a3bfa4647517d2ebd519717f0f + md5: 563a022fc58cf7a200c35cb3fee07a6b + depends: + - python >=3.10 + - urllib3 >=2,<3 + - python + license: Apache-2.0 + license_family: APACHE + purls: + - 
pkg:pypi/id?source=hash-mapping + size: 27972 + timestamp: 1770237711404 +- conda: https://conda.anaconda.org/conda-forge/noarch/identify-2.6.18-pyhd8ed1ab_0.conda + sha256: 3bae1b612ccc71e49c5795a369a82c4707ae6fd4e63360e8ecc129f9539f779b + md5: 635d1a924e1c55416fce044ed96144a2 + depends: + - python >=3.10 + - ukkonen + license: MIT + license_family: MIT + purls: + - pkg:pypi/identify?source=hash-mapping + size: 79749 + timestamp: 1774239544252 +- conda: https://conda.anaconda.org/conda-forge/noarch/idna-3.11-pyhd8ed1ab_0.conda + sha256: ae89d0299ada2a3162c2614a9d26557a92aa6a77120ce142f8e0109bbf0342b0 + md5: 53abe63df7e10a6ba605dc5f9f961d36 + depends: + - python >=3.10 + license: BSD-3-Clause + license_family: BSD + purls: + - pkg:pypi/idna?source=hash-mapping + size: 50721 + timestamp: 1760286526795 +- conda: https://conda.anaconda.org/conda-forge/linux-64/imagecodecs-2024.12.30-py310h78a9a29_0.conda + sha256: 13da21dfa6428f709d836703b2654e52aa96d7a1581afa37352982915e570add + md5: e0c50079904122427bcf52e1afcd1cdb + depends: + - __glibc >=2.17,<3.0.a0 + - blosc >=1.21.6,<2.0a0 + - brunsli >=0.1,<1.0a0 + - bzip2 >=1.0.8,<2.0a0 + - c-blosc2 >=2.15.2,<2.16.0a0 + - charls >=2.4.2,<2.5.0a0 + - giflib >=5.2.2,<5.3.0a0 + - jxrlib >=1.1,<1.2.0a0 + - lcms2 >=2.16,<3.0a0 + - lerc >=4.0.0,<5.0a0 + - libaec >=1.1.3,<2.0a0 + - libavif16 >=1.1.1,<2.0a0 + - libbrotlicommon >=1.1.0,<1.2.0a0 + - libbrotlidec >=1.1.0,<1.2.0a0 + - libbrotlienc >=1.1.0,<1.2.0a0 + - libdeflate >=1.23,<1.24.0a0 + - libgcc >=13 + - libjpeg-turbo >=3.0.0,<4.0a0 + - libjxl >=0.11,<0.12.0a0 + - liblzma >=5.6.3,<6.0a0 + - libpng >=1.6.45,<1.7.0a0 + - libstdcxx >=13 + - libtiff >=4.7.0,<4.8.0a0 + - libwebp-base >=1.5.0,<2.0a0 + - libzlib >=1.3.1,<2.0a0 + - libzopfli >=1.0.3,<1.1.0a0 + - lz4-c >=1.10.0,<1.11.0a0 + - numpy >=1.19,<3 + - openjpeg >=2.5.3,<3.0a0 + - python >=3.10,<3.11.0a0 + - python_abi 3.10.* *_cp310 + - snappy >=1.2.1,<1.3.0a0 + - zfp >=1.0.1,<2.0a0 + - zlib-ng >=2.2.3,<2.3.0a0 + - zstd 
>=1.5.6,<1.6.0a0 + license: BSD-3-Clause + license_family: BSD + purls: + - pkg:pypi/imagecodecs?source=hash-mapping + size: 2073743 + timestamp: 1736603669881 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/imagecodecs-2025.3.30-py310hc9b329b_2.conda + sha256: 8ae10fe3bef5f506d1cb03dd80fec79f1093615989b20e8067c32f2fe640ae85 + md5: b9ff78f0d7494708f9a4325d749ecfc0 + depends: + - __osx >=11.0 + - blosc >=1.21.6,<2.0a0 + - brunsli >=0.1,<1.0a0 + - bzip2 >=1.0.8,<2.0a0 + - c-blosc2 >=2.19.0,<2.20.0a0 + - charls >=2.4.2,<2.5.0a0 + - giflib >=5.2.2,<5.3.0a0 + - jxrlib >=1.1,<1.2.0a0 + - lcms2 >=2.17,<3.0a0 + - lerc >=4.0.0,<5.0a0 + - libaec >=1.1.4,<2.0a0 + - libavif16 >=1.3.0,<2.0a0 + - libbrotlicommon >=1.1.0,<1.2.0a0 + - libbrotlidec >=1.1.0,<1.2.0a0 + - libbrotlienc >=1.1.0,<1.2.0a0 + - libcxx >=18 + - libdeflate >=1.24,<1.25.0a0 + - libjpeg-turbo >=3.1.0,<4.0a0 + - libjxl >=0.11,<0.12.0a0 + - liblzma >=5.8.1,<6.0a0 + - libpng >=1.6.49,<1.7.0a0 + - libtiff >=4.7.0,<4.8.0a0 + - libwebp-base >=1.5.0,<2.0a0 + - libzlib >=1.3.1,<2.0a0 + - libzopfli >=1.0.3,<1.1.0a0 + - lz4-c >=1.10.0,<1.11.0a0 + - numpy >=1.21,<3 + - openjpeg >=2.5.3,<3.0a0 + - python >=3.10,<3.11.0a0 + - python >=3.10,<3.11.0a0 *_cpython + - python_abi 3.10.* *_cp310 + - snappy >=1.2.1,<1.3.0a0 + - zfp >=1.0.1,<2.0a0 + - zlib-ng >=2.2.4,<2.3.0a0 + - zstd >=1.5.7,<1.6.0a0 + license: BSD-3-Clause + license_family: BSD + purls: + - pkg:pypi/imagecodecs?source=hash-mapping + size: 1659154 + timestamp: 1750867609256 +- conda: https://conda.anaconda.org/conda-forge/noarch/imageio-2.37.0-pyhfb79c49_0.conda + sha256: 8ef69fa00c68fad34a3b7b260ea774fda9bd9274fd706d3baffb9519fd0063fe + md5: b5577bc2212219566578fd5af9993af6 + depends: + - numpy + - pillow >=8.3.2 + - python >=3.9 + license: BSD-2-Clause + license_family: BSD + purls: + - pkg:pypi/imageio?source=hash-mapping + size: 293226 + timestamp: 1738273949742 +- conda: 
https://conda.anaconda.org/conda-forge/noarch/imagesize-2.0.0-pyhd8ed1ab_0.conda + sha256: 5a047f9eac290e679b4e6f6f4cbfcc5acdfbf031a4f06824d4ddb590cdbb850b + md5: 92617c2ba2847cca7a6ed813b6f4ab79 + depends: + - python >=3.10 + license: MIT + license_family: MIT + purls: + - pkg:pypi/imagesize?source=hash-mapping + size: 15729 + timestamp: 1773752188889 +- conda: https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-8.8.0-pyhcf101f3_0.conda + sha256: 82ab2a0d91ca1e7e63ab6a4939356667ef683905dea631bc2121aa534d347b16 + md5: 080594bf4493e6bae2607e65390c520a + depends: + - python >=3.10 + - zipp >=3.20 + - python + license: Apache-2.0 + license_family: APACHE + purls: + - pkg:pypi/importlib-metadata?source=compressed-mapping + size: 34387 + timestamp: 1773931568510 +- conda: https://conda.anaconda.org/conda-forge/noarch/importlib-resources-7.1.0-pyhd8ed1ab_0.conda + sha256: 6a2f86ef0965605d742b5b94229bf8b829258d0a9f640e3651901cc72ef9a0a5 + md5: e3bffa82b874f8b9a2631bddb3869529 + depends: + - importlib_resources >=7.1.0,<7.1.1.0a0 + - python >=3.10 + license: Apache-2.0 + purls: + - pkg:pypi/importlib-resources?source=compressed-mapping + size: 10354 + timestamp: 1776068852701 +- conda: https://conda.anaconda.org/conda-forge/noarch/importlib_metadata-8.8.0-h8f7a5dd_0.conda + sha256: 09f2b26f8c727fd2138fd4846b91708c32d5684120b59d5c8d38472c0eefbf33 + md5: 12e7a110add59a05b337484568a83a4d + depends: + - importlib-metadata ==8.8.0 pyhcf101f3_0 + license: Apache-2.0 + license_family: APACHE + purls: [] + size: 21425 + timestamp: 1773931568510 +- conda: https://conda.anaconda.org/conda-forge/noarch/importlib_resources-7.1.0-pyhd8ed1ab_0.conda + sha256: a563a51aa522998172838e867e6dedcf630bc45796e8612f5a1f6d73e9c8125a + md5: 0ba6225c279baf7ea9473a62ea0ec9ae + depends: + - python >=3.10 + - zipp >=3.1.0 + constrains: + - importlib-resources >=7.1.0,<7.1.1.0a0 + license: Apache-2.0 + purls: + - pkg:pypi/importlib-resources?source=compressed-mapping + size: 34809 + 
timestamp: 1776068839274 +- conda: https://conda.anaconda.org/conda-forge/noarch/iniconfig-2.3.0-pyhd8ed1ab_0.conda + sha256: e1a9e3b1c8fe62dc3932a616c284b5d8cbe3124bbfbedcf4ce5c828cb166ee19 + md5: 9614359868482abba1bd15ce465e3c42 + depends: + - python >=3.10 + license: MIT + license_family: MIT + purls: + - pkg:pypi/iniconfig?source=compressed-mapping + size: 13387 + timestamp: 1760831448842 +- conda: https://conda.anaconda.org/conda-forge/noarch/ipython-8.37.0-pyh8f84b5b_0.conda + sha256: e43fa762183b49c3c3b811d41259e94bb14b7bff4a239b747ef4e1c6bbe2702d + md5: 177cfa19fe3d74c87a8889286dc64090 + depends: + - __unix + - pexpect >4.3 + - decorator + - exceptiongroup + - jedi >=0.16 + - matplotlib-inline + - pickleshare + - prompt-toolkit >=3.0.41,<3.1.0 + - pygments >=2.4.0 + - python >=3.10 + - stack_data + - traitlets >=5.13.0 + - typing_extensions >=4.6 + - python + license: BSD-3-Clause + license_family: BSD + purls: + - pkg:pypi/ipython?source=hash-mapping + size: 639160 + timestamp: 1748711175284 +- conda: https://conda.anaconda.org/conda-forge/noarch/isort-8.0.1-pyhd8ed1ab_0.conda + sha256: cc5c2b513143ea9675ba5b3570182f7568fd1029b299ee3bc58424dcce8c5539 + md5: 98cdd8615792e90da1023bc546f806d9 + depends: + - importlib-metadata >=4.6.0 + - python >=3.10,<4.0 + license: MIT + license_family: MIT + purls: + - pkg:pypi/isort?source=hash-mapping + size: 72146 + timestamp: 1772278531671 +- conda: https://conda.anaconda.org/conda-forge/noarch/jaraco.classes-3.4.0-pyhcf101f3_3.conda + sha256: 3cc991f0f09dfd00d2626e745ba68da03e4f1dcbb7b36dd20f7a7373643cd5d5 + md5: d59568bad316413c89831456e691de29 + depends: + - python >=3.10 + - more-itertools + - python + license: MIT + license_family: MIT + purls: + - pkg:pypi/jaraco-classes?source=hash-mapping + size: 14831 + timestamp: 1767294269456 +- conda: https://conda.anaconda.org/conda-forge/noarch/jaraco.context-6.1.1-pyhcf101f3_0.conda + sha256: 49c3e2e9aa4930734badfcbb31543406ed1b5531cb833f595cf57baf628dea7d + md5: 
5ed60de12f1673398943262371667f79 + depends: + - python >=3.10 + - backports.tarfile + - python + license: MIT + license_family: MIT + purls: + - pkg:pypi/jaraco-context?source=hash-mapping + size: 15368 + timestamp: 1773131463776 +- conda: https://conda.anaconda.org/conda-forge/noarch/jaraco.functools-4.4.0-pyhcf101f3_1.conda + sha256: 6a91447b3bb4d7ae94cc0d77ed12617796629aee11111efe7ea43cbd0e113bda + md5: aa83cc08626bf6b613a3103942be8951 + depends: + - python >=3.10 + - more-itertools + - python + license: MIT + license_family: MIT + purls: + - pkg:pypi/jaraco-functools?source=hash-mapping + size: 18744 + timestamp: 1767294193246 +- conda: https://conda.anaconda.org/conda-forge/noarch/jedi-0.19.2-pyhd8ed1ab_1.conda + sha256: 92c4d217e2dc68983f724aa983cca5464dcb929c566627b26a2511159667dba8 + md5: a4f4c5dc9b80bc50e0d3dc4e6e8f1bd9 + depends: + - parso >=0.8.3,<0.9.0 + - python >=3.9 + license: Apache-2.0 AND MIT + purls: + - pkg:pypi/jedi?source=hash-mapping + size: 843646 + timestamp: 1733300981994 +- conda: https://conda.anaconda.org/conda-forge/noarch/jeepney-0.9.0-pyhd8ed1ab_0.conda + sha256: 00d37d85ca856431c67c8f6e890251e7cc9e5ef3724a0302b8d4a101f22aa27f + md5: b4b91eb14fbe2f850dd2c5fc20676c0d + depends: + - python >=3.9 + license: MIT + license_family: MIT + purls: + - pkg:pypi/jeepney?source=hash-mapping + size: 40015 + timestamp: 1740828380668 +- conda: https://conda.anaconda.org/conda-forge/noarch/jinja2-3.1.6-pyhcf101f3_1.conda + sha256: fc9ca7348a4f25fed2079f2153ecdcf5f9cf2a0bc36c4172420ca09e1849df7b + md5: 04558c96691bed63104678757beb4f8d + depends: + - markupsafe >=2.0 + - python >=3.10 + - python + license: BSD-3-Clause + license_family: BSD + purls: + - pkg:pypi/jinja2?source=compressed-mapping + size: 120685 + timestamp: 1764517220861 +- conda: https://conda.anaconda.org/conda-forge/noarch/jmespath-1.1.0-pyhcf101f3_1.conda + sha256: 904d43d5210584004cf8b38f9657c717661ae55b0fb3f60573be974e50653fa1 + md5: cc73a9bd315659dc5307a5270f44786f + depends: + - 
python >=3.10 + - python + license: MIT + license_family: MIT + purls: + - pkg:pypi/jmespath?source=hash-mapping + size: 25946 + timestamp: 1769161799923 +- conda: https://conda.anaconda.org/conda-forge/noarch/json-merge-patch-0.2-pyhd8ed1ab_2.conda + sha256: dcb8881bd19ed15e321ae35bddd74c22277fbd5f4e47e4d62f40362f9212305d + md5: 4d05d9514233b53fe421c34e6b249c6b + depends: + - python >=3.9 + license: BSD-3-Clause + license_family: BSD + purls: + - pkg:pypi/json-merge-patch?source=hash-mapping + size: 11200 + timestamp: 1734249397662 +- conda: https://conda.anaconda.org/conda-forge/noarch/jsonpatch-1.33-pyhd8ed1ab_1.conda + sha256: 304955757d1fedbe344af43b12b5467cca072f83cce6109361ba942e186b3993 + md5: cb60ae9cf02b9fcb8004dec4089e5691 + depends: + - jsonpointer >=1.9 + - python >=3.9 + license: BSD-3-Clause + license_family: BSD + purls: + - pkg:pypi/jsonpatch?source=hash-mapping + size: 17311 + timestamp: 1733814664790 +- conda: https://conda.anaconda.org/conda-forge/noarch/jsonpointer-3.1.1-pyhcf101f3_0.conda + sha256: a3d10301b6ff399ba1f3d39e443664804a3d28315a4fb81e745b6817845f70ae + md5: 89bf346df77603055d3c8fe5811691e6 + depends: + - python >=3.10 + - python + license: BSD-3-Clause + license_family: BSD + purls: + - pkg:pypi/jsonpointer?source=compressed-mapping + size: 14190 + timestamp: 1774311356147 +- conda: https://conda.anaconda.org/conda-forge/noarch/jsonschema-4.26.0-pyhcf101f3_0.conda + sha256: db973a37d75db8e19b5f44bbbdaead0c68dde745407f281e2a7fe4db74ec51d7 + md5: ada41c863af263cc4c5fcbaff7c3e4dc + depends: + - attrs >=22.2.0 + - jsonschema-specifications >=2023.3.6 + - python >=3.10 + - referencing >=0.28.4 + - rpds-py >=0.25.0 + - python + license: MIT + license_family: MIT + purls: + - pkg:pypi/jsonschema?source=hash-mapping + size: 82356 + timestamp: 1767839954256 +- conda: https://conda.anaconda.org/conda-forge/noarch/jsonschema-specifications-2025.9.1-pyhcf101f3_0.conda + sha256: 0a4f3b132f0faca10c89fdf3b60e15abb62ded6fa80aebfc007d05965192aa04 + 
md5: 439cd0f567d697b20a8f45cb70a1005a + depends: + - python >=3.10 + - referencing >=0.31.0 + - python + license: MIT + license_family: MIT + purls: + - pkg:pypi/jsonschema-specifications?source=hash-mapping + size: 19236 + timestamp: 1757335715225 +- conda: https://conda.anaconda.org/conda-forge/noarch/jupyter_core-5.9.1-pyhc90fa1f_0.conda + sha256: 1d34b80e5bfcd5323f104dbf99a2aafc0e5d823019d626d0dce5d3d356a2a52a + md5: b38fe4e78ee75def7e599843ef4c1ab0 + depends: + - __unix + - python + - platformdirs >=2.5 + - python >=3.10 + - traitlets >=5.3 + - python + constrains: + - pywin32 >=300 + license: BSD-3-Clause + license_family: BSD + purls: + - pkg:pypi/jupyter-core?source=hash-mapping + size: 65503 + timestamp: 1760643864586 +- conda: https://conda.anaconda.org/conda-forge/linux-64/jxrlib-1.1-hd590300_3.conda + sha256: 2057ca87b313bde5b74b93b0e696f8faab69acd4cb0edebb78469f3f388040c0 + md5: 5aeabe88534ea4169d4c49998f293d6c + depends: + - libgcc-ng >=12 + license: BSD-2-Clause + license_family: BSD + purls: [] + size: 239104 + timestamp: 1703333860145 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/jxrlib-1.1-h93a5062_3.conda + sha256: c9e0d3cf9255d4585fa9b3d07ace3bd934fdc6a67ef4532e5507282eff2364ab + md5: 879997fd868f8e9e4c2a12aec8583799 + license: BSD-2-Clause + license_family: BSD + purls: [] + size: 197843 + timestamp: 1703334079437 +- conda: https://conda.anaconda.org/conda-forge/noarch/keyring-25.7.0-pyh534df25_0.conda + sha256: 9def5c6fb3b3b4952a4f6b55a019b5c7065b592682b84710229de5a0b73f6364 + md5: c88f9579d08eb4031159f03640714ce3 + depends: + - __osx + - importlib-metadata >=4.11.4 + - importlib_resources + - jaraco.classes + - jaraco.context + - jaraco.functools + - python >=3.10 + license: MIT + license_family: MIT + purls: + - pkg:pypi/keyring?source=hash-mapping + size: 37924 + timestamp: 1763320995459 +- conda: https://conda.anaconda.org/conda-forge/noarch/keyring-25.7.0-pyha804496_0.conda + sha256: 
010718b1b1a35ce72782d38e6d6b9495d8d7d0dbea9a3e42901d030ff2189545 + md5: 9eeb0eaf04fa934808d3e070eebbe630 + depends: + - __linux + - importlib-metadata >=4.11.4 + - importlib_resources + - jaraco.classes + - jaraco.context + - jaraco.functools + - jeepney >=0.4.2 + - python >=3.10 + - secretstorage >=3.2 + license: MIT + license_family: MIT + purls: + - pkg:pypi/keyring?source=hash-mapping + size: 37717 + timestamp: 1763320674488 +- conda: https://conda.anaconda.org/conda-forge/linux-64/keyutils-1.6.3-hb9d3cd8_0.conda + sha256: 0960d06048a7185d3542d850986d807c6e37ca2e644342dd0c72feefcf26c2a4 + md5: b38117a3c920364aff79f870c984b4a3 + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=13 + license: LGPL-2.1-or-later + purls: [] + size: 134088 + timestamp: 1754905959823 +- conda: https://conda.anaconda.org/conda-forge/linux-64/kiwisolver-1.5.0-py310haaf941d_0.conda + sha256: 44312f8b881a4c77af4be198c8e2e2022e406f58314191c31be8e172382ecdf7 + md5: 8993ab7e5dce89147288dd78686e790c + depends: + - python + - libstdcxx >=14 + - libgcc >=14 + - __glibc >=2.17,<3.0.a0 + - python_abi 3.10.* *_cp310 + license: BSD-3-Clause + license_family: BSD + purls: + - pkg:pypi/kiwisolver?source=hash-mapping + size: 77809 + timestamp: 1773067043838 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/kiwisolver-1.5.0-py310h34990b0_0.conda + sha256: 3d902014b20f2e4a3d5a20fc1a3bd4a66c5ad46e0f3b2031f7c643ae178ecfcf + md5: 5f82c645836131e2d910d5562a598bd3 + depends: + - python + - __osx >=11.0 + - libcxx >=19 + - python 3.10.* *_cpython + - python_abi 3.10.* *_cp310 + license: BSD-3-Clause + license_family: BSD + purls: + - pkg:pypi/kiwisolver?source=hash-mapping + size: 66764 + timestamp: 1773067259184 +- conda: https://conda.anaconda.org/conda-forge/linux-64/krb5-1.21.3-h659f571_0.conda + sha256: 99df692f7a8a5c27cd14b5fb1374ee55e756631b9c3d659ed3ee60830249b238 + md5: 3f43953b7d3fb3aaa1d0d0723d91e368 + depends: + - keyutils >=1.6.1,<2.0a0 + - libedit >=3.1.20191231,<3.2.0a0 + - libedit 
>=3.1.20191231,<4.0a0 + - libgcc-ng >=12 + - libstdcxx-ng >=12 + - openssl >=3.3.1,<4.0a0 + license: MIT + license_family: MIT + purls: [] + size: 1370023 + timestamp: 1719463201255 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/krb5-1.22.2-h385eeb1_0.conda + sha256: c0a0bf028fe7f3defcdcaa464e536cf1b202d07451e18ad83fdd169d15bef6ed + md5: e446e1822f4da8e5080a9de93474184d + depends: + - __osx >=11.0 + - libcxx >=19 + - libedit >=3.1.20250104,<3.2.0a0 + - libedit >=3.1.20250104,<4.0a0 + - openssl >=3.5.5,<4.0a0 + license: MIT + license_family: MIT + purls: [] + size: 1160828 + timestamp: 1769770119811 +- conda: https://conda.anaconda.org/conda-forge/noarch/lazy-loader-0.5-pyhd8ed1ab_0.conda + sha256: 1a88069ac61d2756ccaf26a6c206ab4d56610fb054bd2fffb5df4cd0744ab78e + md5: 75932da6f03a6bef32b70a51e991f6eb + depends: + - packaging + - python >=3.10 + license: BSD-3-Clause + license_family: BSD + purls: + - pkg:pypi/lazy-loader?source=hash-mapping + size: 14883 + timestamp: 1772817374026 +- conda: https://conda.anaconda.org/conda-forge/linux-64/lcms2-2.17-h717163a_0.conda + sha256: d6a61830a354da022eae93fa896d0991385a875c6bba53c82263a289deda9db8 + md5: 000e85703f0fd9594c81710dd5066471 + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=13 + - libjpeg-turbo >=3.0.0,<4.0a0 + - libtiff >=4.7.0,<4.8.0a0 + license: MIT + license_family: MIT + purls: [] + size: 248046 + timestamp: 1739160907615 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/lcms2-2.18-hdfa7624_0.conda + sha256: d768da024ab74a4b30642401877fa914a68bdc238667f16b1ec2e0e98b2451a6 + md5: 6631a7bd2335bb9699b1dbc234b19784 + depends: + - __osx >=11.0 + - libjpeg-turbo >=3.1.2,<4.0a0 + - libtiff >=4.7.1,<4.8.0a0 + license: MIT + license_family: MIT + purls: [] + size: 211756 + timestamp: 1768184994800 +- conda: https://conda.anaconda.org/conda-forge/linux-64/ld_impl_linux-64-2.45.1-default_hbd61a6d_102.conda + sha256: 3d584956604909ff5df353767f3a2a2f60e07d070b328d109f30ac40cd62df6c + md5: 
18335a698559cdbcd86150a48bf54ba6 + depends: + - __glibc >=2.17,<3.0.a0 + - zstd >=1.5.7,<1.6.0a0 + constrains: + - binutils_impl_linux-64 2.45.1 + license: GPL-3.0-only + license_family: GPL + purls: [] + size: 728002 + timestamp: 1774197446916 +- conda: https://conda.anaconda.org/conda-forge/linux-64/lerc-4.1.0-hdb68285_0.conda + sha256: f84cb54782f7e9cea95e810ea8fef186e0652d0fa73d3009914fa2c1262594e1 + md5: a752488c68f2e7c456bcbd8f16eec275 + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=14 + - libstdcxx >=14 + license: Apache-2.0 + license_family: Apache + purls: [] + size: 261513 + timestamp: 1773113328888 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/lerc-4.1.0-h1eee2c3_0.conda + sha256: 66e5ffd301a44da696f3efc2f25d6d94f42a9adc0db06c44ad753ab844148c51 + md5: 095e5749868adab9cae42d4b460e5443 + depends: + - __osx >=11.0 + - libcxx >=19 + license: Apache-2.0 + license_family: Apache + purls: [] + size: 164222 + timestamp: 1773114244984 +- conda: https://conda.anaconda.org/conda-forge/linux-64/libabseil-20250512.1-cxx17_hba17884_0.conda + sha256: dcd1429a1782864c452057a6c5bc1860f2b637dc20a2b7e6eacd57395bbceff8 + md5: 83b160d4da3e1e847bf044997621ed63 + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=13 + - libstdcxx >=13 + constrains: + - libabseil-static =20250512.1=cxx17* + - abseil-cpp =20250512.1 + license: Apache-2.0 + license_family: Apache + purls: [] + size: 1310612 + timestamp: 1750194198254 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/libabseil-20250512.1-cxx17_hd41c47c_0.conda + sha256: 7f0ee9ae7fa2cf7ac92b0acf8047c8bac965389e48be61bf1d463e057af2ea6a + md5: 360dbb413ee2c170a0a684a33c4fc6b8 + depends: + - __osx >=11.0 + - libcxx >=18 + constrains: + - libabseil-static =20250512.1=cxx17* + - abseil-cpp =20250512.1 + license: Apache-2.0 + license_family: Apache + purls: [] + size: 1174081 + timestamp: 1750194620012 +- conda: https://conda.anaconda.org/conda-forge/linux-64/libadbc-driver-postgresql-1.8.0-h6eab0cb_0.conda + 
sha256: ecdb0d37282b0bf83a467875beec0fc7d103b67ae614152d4c0abbc4383e7534 + md5: 2dee75838b7854a99ef46d0bdc34add8 + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=14 + - libpq >=17.6,<18.0a0 + - libstdcxx >=14 + license: Apache-2.0 + license_family: APACHE + purls: [] + size: 275080 + timestamp: 1757913275428 +- conda: https://conda.anaconda.org/conda-forge/linux-64/libadbc-driver-sqlite-1.11.0-hcea63bf_0.conda + sha256: 5760877ea412bd04751d35fb12c837402b0adac76bb107198c872990fc5b4091 + md5: c355d791db5febe0f7b4811c9c8c936d + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=14 + - libsqlite >=3.52.0,<4.0a0 + - libstdcxx >=14 + license: Apache-2.0 + license_family: APACHE + purls: [] + size: 201170 + timestamp: 1775527103879 +- conda: https://conda.anaconda.org/conda-forge/linux-64/libaec-1.1.5-h088129d_0.conda + sha256: 822e4ae421a7e9c04e841323526321185f6659222325e1a9aedec811c686e688 + md5: 86f7414544ae606282352fa1e116b41f + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=14 + - libstdcxx >=14 + license: BSD-2-Clause + license_family: BSD + purls: [] + size: 36544 + timestamp: 1769221884824 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/libaec-1.1.5-h8664d51_0.conda + sha256: af9cd8db11eb719e38a3340c88bb4882cf19b5b4237d93845224489fc2a13b46 + md5: 13e6d9ae0efbc9d2e9a01a91f4372b41 + depends: + - __osx >=11.0 + - libcxx >=19 + license: BSD-2-Clause + license_family: BSD + purls: [] + size: 30390 + timestamp: 1769222133373 +- conda: https://conda.anaconda.org/conda-forge/linux-64/libarrow-22.0.0-h91d8edf_1_cpu.conda + build_number: 1 + sha256: d362bce15ae96227aa6a530f162a0c9bcb840271d358ec7d0be91fc2034fb9da + md5: 281f485a45936155da9f0f607629f7b5 + depends: + - __glibc >=2.17,<3.0.a0 + - aws-crt-cpp >=0.35.0,<0.35.1.0a0 + - aws-sdk-cpp >=1.11.606,<1.11.607.0a0 + - azure-core-cpp >=1.16.1,<1.16.2.0a0 + - azure-identity-cpp >=1.13.2,<1.13.3.0a0 + - azure-storage-blobs-cpp >=12.15.0,<12.15.1.0a0 + - azure-storage-files-datalake-cpp >=12.13.0,<12.13.1.0a0 + - 
bzip2 >=1.0.8,<2.0a0 + - glog >=0.7.1,<0.8.0a0 + - libabseil * cxx17* + - libabseil >=20250512.1,<20250513.0a0 + - libbrotlidec >=1.1.0,<1.2.0a0 + - libbrotlienc >=1.1.0,<1.2.0a0 + - libgcc >=14 + - libgoogle-cloud >=2.39.0,<2.40.0a0 + - libgoogle-cloud-storage >=2.39.0,<2.40.0a0 + - libopentelemetry-cpp >=1.21.0,<1.22.0a0 + - libprotobuf >=6.31.1,<6.31.2.0a0 + - libstdcxx >=14 + - libzlib >=1.3.1,<2.0a0 + - lz4-c >=1.10.0,<1.11.0a0 + - orc >=2.2.1,<2.2.2.0a0 + - snappy >=1.2.2,<1.3.0a0 + - zstd >=1.5.7,<1.6.0a0 + constrains: + - parquet-cpp <0.0a0 + - apache-arrow-proc =*=cpu + - arrow-cpp <0.0a0 + license: Apache-2.0 + license_family: APACHE + purls: [] + size: 6299528 + timestamp: 1761731556412 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/libarrow-22.0.0-h7239961_1_cpu.conda + build_number: 1 + sha256: 409eebbe2d3c7d7beb221824023fca6351b495a3bba64e16bea5cf999020eb13 + md5: 6094bb8dbfc8c343e60648bcb6fa1cbc + depends: + - __osx >=11.0 + - aws-crt-cpp >=0.35.0,<0.35.1.0a0 + - aws-sdk-cpp >=1.11.606,<1.11.607.0a0 + - azure-core-cpp >=1.16.1,<1.16.2.0a0 + - azure-identity-cpp >=1.13.2,<1.13.3.0a0 + - azure-storage-blobs-cpp >=12.15.0,<12.15.1.0a0 + - azure-storage-files-datalake-cpp >=12.13.0,<12.13.1.0a0 + - bzip2 >=1.0.8,<2.0a0 + - glog >=0.7.1,<0.8.0a0 + - libabseil * cxx17* + - libabseil >=20250512.1,<20250513.0a0 + - libbrotlidec >=1.1.0,<1.2.0a0 + - libbrotlienc >=1.1.0,<1.2.0a0 + - libcxx >=19 + - libgoogle-cloud >=2.39.0,<2.40.0a0 + - libgoogle-cloud-storage >=2.39.0,<2.40.0a0 + - libopentelemetry-cpp >=1.21.0,<1.22.0a0 + - libprotobuf >=6.31.1,<6.31.2.0a0 + - libzlib >=1.3.1,<2.0a0 + - lz4-c >=1.10.0,<1.11.0a0 + - orc >=2.2.1,<2.2.2.0a0 + - snappy >=1.2.2,<1.3.0a0 + - zstd >=1.5.7,<1.6.0a0 + constrains: + - parquet-cpp <0.0a0 + - arrow-cpp <0.0a0 + - apache-arrow-proc =*=cpu + license: Apache-2.0 + license_family: APACHE + purls: [] + size: 4168705 + timestamp: 1761730644808 +- conda: 
https://conda.anaconda.org/conda-forge/linux-64/libarrow-acero-22.0.0-h635bf11_1_cpu.conda + build_number: 1 + sha256: 439e63096f3aa640b505bfbee37f015145719ae066890902e6db08a9b6f7f8d0 + md5: a99ccce2115d67e4561dabbf46a39719 + depends: + - __glibc >=2.17,<3.0.a0 + - libarrow 22.0.0 h91d8edf_1_cpu + - libarrow-compute 22.0.0 h8c2c5c3_1_cpu + - libgcc >=14 + - libstdcxx >=14 + license: Apache-2.0 + license_family: APACHE + purls: [] + size: 582277 + timestamp: 1761731871814 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/libarrow-acero-22.0.0-hc317990_1_cpu.conda + build_number: 1 + sha256: 8f2d0109c3e3ffd9d593e9f048a0f24c42c5faa1c5c4b8be4048faedab94667a + md5: fd0bef87260f88df84c44ec35a59855d + depends: + - __osx >=11.0 + - libabseil * cxx17* + - libabseil >=20250512.1,<20250513.0a0 + - libarrow 22.0.0 h7239961_1_cpu + - libarrow-compute 22.0.0 h75845d1_1_cpu + - libcxx >=19 + - libopentelemetry-cpp >=1.21.0,<1.22.0a0 + - libprotobuf >=6.31.1,<6.31.2.0a0 + license: Apache-2.0 + license_family: APACHE + purls: [] + size: 518492 + timestamp: 1761731184474 +- conda: https://conda.anaconda.org/conda-forge/linux-64/libarrow-compute-22.0.0-h8c2c5c3_1_cpu.conda + build_number: 1 + sha256: 31b5903243b889cfbae2ad5e26ef33d71251d7fe97f361b10debcab8d5e2123d + md5: d95a82f052bb3d4d3162c4e4f862d8ef + depends: + - __glibc >=2.17,<3.0.a0 + - libarrow 22.0.0 h91d8edf_1_cpu + - libgcc >=14 + - libre2-11 >=2025.8.12 + - libstdcxx >=14 + - libutf8proc >=2.11.0,<2.12.0a0 + - re2 + license: Apache-2.0 + license_family: APACHE + purls: [] + size: 3014308 + timestamp: 1761731664524 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/libarrow-compute-22.0.0-h75845d1_1_cpu.conda + build_number: 1 + sha256: f11289fdd315f820b7594bac576752dbbef433a52d9d44e5e2a527490879427c + md5: f64692bb70ea00fb59c19c61c68bed20 + depends: + - __osx >=11.0 + - libabseil * cxx17* + - libabseil >=20250512.1,<20250513.0a0 + - libarrow 22.0.0 h7239961_1_cpu + - libcxx >=19 + - libopentelemetry-cpp 
>=1.21.0,<1.22.0a0 + - libprotobuf >=6.31.1,<6.31.2.0a0 + - libre2-11 >=2025.8.12 + - libutf8proc >=2.11.0,<2.12.0a0 + - re2 + license: Apache-2.0 + license_family: APACHE + purls: [] + size: 2151953 + timestamp: 1761730867732 +- conda: https://conda.anaconda.org/conda-forge/linux-64/libarrow-dataset-22.0.0-h635bf11_1_cpu.conda + build_number: 1 + sha256: 3a931719fc4667341e2d00bcde1098afd772633361571bc803a4f759351e1052 + md5: 123dbb58a0fbfc00f5f1cbe38be48ac0 + depends: + - __glibc >=2.17,<3.0.a0 + - libarrow 22.0.0 h91d8edf_1_cpu + - libarrow-acero 22.0.0 h635bf11_1_cpu + - libarrow-compute 22.0.0 h8c2c5c3_1_cpu + - libgcc >=14 + - libparquet 22.0.0 h7376487_1_cpu + - libstdcxx >=14 + license: Apache-2.0 + license_family: APACHE + purls: [] + size: 581101 + timestamp: 1761732002172 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/libarrow-dataset-22.0.0-hc317990_1_cpu.conda + build_number: 1 + sha256: 4cbddf43e9188af90dac9a2cb06f94563e03d1d4558b1f9922c18d25f865f76d + md5: cc44424d10adddcce148c470b1980bd4 + depends: + - __osx >=11.0 + - libabseil * cxx17* + - libabseil >=20250512.1,<20250513.0a0 + - libarrow 22.0.0 h7239961_1_cpu + - libarrow-acero 22.0.0 hc317990_1_cpu + - libarrow-compute 22.0.0 h75845d1_1_cpu + - libcxx >=19 + - libopentelemetry-cpp >=1.21.0,<1.22.0a0 + - libparquet 22.0.0 h0ac143b_1_cpu + - libprotobuf >=6.31.1,<6.31.2.0a0 + license: Apache-2.0 + license_family: APACHE + purls: [] + size: 515679 + timestamp: 1761731390669 +- conda: https://conda.anaconda.org/conda-forge/linux-64/libarrow-substrait-22.0.0-h3f74fd7_1_cpu.conda + build_number: 1 + sha256: a751da44700cd2c95b8b880b86960bdef75f889682379962a258eed7f9e36c67 + md5: 497c7e4b65c360ef4d2ec1cdcada3acc + depends: + - __glibc >=2.17,<3.0.a0 + - libabseil * cxx17* + - libabseil >=20250512.1,<20250513.0a0 + - libarrow 22.0.0 h91d8edf_1_cpu + - libarrow-acero 22.0.0 h635bf11_1_cpu + - libarrow-dataset 22.0.0 h635bf11_1_cpu + - libgcc >=14 + - libprotobuf >=6.31.1,<6.31.2.0a0 + - 
libstdcxx >=14 + license: Apache-2.0 + license_family: APACHE + purls: [] + size: 483145 + timestamp: 1761732044316 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/libarrow-substrait-22.0.0-h144af7f_1_cpu.conda + build_number: 1 + sha256: e71b90a47d00482e59ddd3aa841b87959b181c3d1f6e5dca065a967c25bfb2f0 + md5: 5ea312e8c6d8d720b8a1468e6a11fed1 + depends: + - __osx >=11.0 + - libabseil * cxx17* + - libabseil >=20250512.1,<20250513.0a0 + - libarrow 22.0.0 h7239961_1_cpu + - libarrow-acero 22.0.0 hc317990_1_cpu + - libarrow-dataset 22.0.0 hc317990_1_cpu + - libcxx >=19 + - libprotobuf >=6.31.1,<6.31.2.0a0 + license: Apache-2.0 + license_family: APACHE + purls: [] + size: 452862 + timestamp: 1761731467365 +- conda: https://conda.anaconda.org/conda-forge/linux-64/libavif16-1.4.1-hcfa2d63_0.conda + sha256: e29d8ed0334305c6bafecb32f9a1967cfc0a081eac916e947a1f2f7c4bb41947 + md5: f79415aee8862b3af85ea55dea37e46b + depends: + - __glibc >=2.17,<3.0.a0 + - aom >=3.9.1,<3.10.0a0 + - dav1d >=1.2.1,<1.2.2.0a0 + - libgcc >=14 + - rav1e >=0.8.1,<0.9.0a0 + - svt-av1 >=4.0.1,<4.0.2.0a0 + license: BSD-2-Clause + license_family: BSD + purls: [] + size: 148710 + timestamp: 1774042709303 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/libavif16-1.4.1-hfce71f6_0.conda + sha256: 09e31e51026a3b74d947ba4b30a68dd99013aeef2860bcb03565bf43cad18da6 + md5: 2df04ee54a2ce2d34cf375eb02a63725 + depends: + - __osx >=11.0 + - aom >=3.9.1,<3.10.0a0 + - dav1d >=1.2.1,<1.2.2.0a0 + - rav1e >=0.8.1,<0.9.0a0 + - svt-av1 >=4.0.1,<4.0.2.0a0 + license: BSD-2-Clause + license_family: BSD + purls: [] + size: 118959 + timestamp: 1774043016600 +- conda: https://conda.anaconda.org/conda-forge/linux-64/libblas-3.11.0-6_h4a7cf45_openblas.conda + build_number: 6 + sha256: 7bfe936dbb5db04820cf300a9cc1f5ee8d5302fc896c2d66e30f1ee2f20fbfd6 + md5: 6d6d225559bfa6e2f3c90ee9c03d4e2e + depends: + - libopenblas >=0.3.32,<0.3.33.0a0 + - libopenblas >=0.3.32,<1.0a0 + constrains: + - blas 2.306 openblas + - 
liblapack 3.11.0 6*_openblas + - liblapacke 3.11.0 6*_openblas + - libcblas 3.11.0 6*_openblas + - mkl <2026 + license: BSD-3-Clause + license_family: BSD + purls: [] + size: 18621 + timestamp: 1774503034895 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/libblas-3.11.0-6_h51639a9_openblas.conda + build_number: 6 + sha256: 979227fc03628925037ab2dfda008eb7b5592644d9c2c21dd285cefe8c42553d + md5: e551103471911260488a02155cef9c94 + depends: + - libopenblas >=0.3.32,<0.3.33.0a0 + - libopenblas >=0.3.32,<1.0a0 + constrains: + - liblapacke 3.11.0 6*_openblas + - liblapack 3.11.0 6*_openblas + - blas 2.306 openblas + - libcblas 3.11.0 6*_openblas + - mkl <2026 + license: BSD-3-Clause + license_family: BSD + purls: [] + size: 18859 + timestamp: 1774504387211 +- conda: https://conda.anaconda.org/conda-forge/linux-64/libbrotlicommon-1.1.0-hb03c661_4.conda + sha256: 2338a92d1de71f10c8cf70f7bb9775b0144a306d75c4812276749f54925612b6 + md5: 1d29d2e33fe59954af82ef54a8af3fe1 + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=14 + license: MIT + license_family: MIT + purls: [] + size: 69333 + timestamp: 1756599354727 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/libbrotlicommon-1.1.0-h6caf38d_4.conda + sha256: 023b609ecc35bfee7935d65fcc5aba1a3ba6807cbba144a0730198c0914f7c79 + md5: 231cffe69d41716afe4525c5c1cc5ddd + depends: + - __osx >=11.0 + license: MIT + license_family: MIT + purls: [] + size: 68938 + timestamp: 1756599687687 +- conda: https://conda.anaconda.org/conda-forge/linux-64/libbrotlidec-1.1.0-hb03c661_4.conda + sha256: fcec0d26f67741b122f0d5eff32f0393d7ebd3ee6bb866ae2f17f3425a850936 + md5: 5cb5a1c9a94a78f5b23684bcb845338d + depends: + - __glibc >=2.17,<3.0.a0 + - libbrotlicommon 1.1.0 hb03c661_4 + - libgcc >=14 + license: MIT + license_family: MIT + purls: [] + size: 33406 + timestamp: 1756599364386 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/libbrotlidec-1.1.0-h6caf38d_4.conda + sha256: 
7f1cf83a00a494185fc087b00c355674a0f12e924b1b500d2c20519e98fdc064 + md5: cb7e7fe96c9eee23a464afd57648d2cd + depends: + - __osx >=11.0 + - libbrotlicommon 1.1.0 h6caf38d_4 + license: MIT + license_family: MIT + purls: [] + size: 29015 + timestamp: 1756599708339 +- conda: https://conda.anaconda.org/conda-forge/linux-64/libbrotlienc-1.1.0-hb03c661_4.conda + sha256: d42c7f0afce21d5279a0d54ee9e64a2279d35a07a90e0c9545caae57d6d7dc57 + md5: 2e55011fa483edb8bfe3fd92e860cd79 + depends: + - __glibc >=2.17,<3.0.a0 + - libbrotlicommon 1.1.0 hb03c661_4 + - libgcc >=14 + license: MIT + license_family: MIT + purls: [] + size: 289680 + timestamp: 1756599375485 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/libbrotlienc-1.1.0-h6caf38d_4.conda + sha256: a2f2c1c2369360147c46f48124a3a17f5122e78543275ff9788dc91a1d5819dc + md5: 4ce5651ae5cd6eebc5899f9bfe0eac3c + depends: + - __osx >=11.0 + - libbrotlicommon 1.1.0 h6caf38d_4 + license: MIT + license_family: MIT + purls: [] + size: 275791 + timestamp: 1756599724058 +- conda: https://conda.anaconda.org/conda-forge/linux-64/libcblas-3.11.0-6_h0358290_openblas.conda + build_number: 6 + sha256: 57edafa7796f6fa3ebbd5367692dd4c7f552be42109c2dd1a7c89b55089bf374 + md5: 36ae340a916635b97ac8a0655ace2a35 + depends: + - libblas 3.11.0 6_h4a7cf45_openblas + constrains: + - blas 2.306 openblas + - liblapack 3.11.0 6*_openblas + - liblapacke 3.11.0 6*_openblas + license: BSD-3-Clause + license_family: BSD + purls: [] + size: 18622 + timestamp: 1774503050205 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/libcblas-3.11.0-6_hb0561ab_openblas.conda + build_number: 6 + sha256: 2e6b3e9b1ab672133b70fc6730e42290e952793f132cb5e72eee22835463eba0 + md5: 805c6d31c5621fd75e53dfcf21fb243a + depends: + - libblas 3.11.0 6_h51639a9_openblas + constrains: + - liblapacke 3.11.0 6*_openblas + - blas 2.306 openblas + - liblapack 3.11.0 6*_openblas + license: BSD-3-Clause + license_family: BSD + purls: [] + size: 18863 + timestamp: 1774504433388 +- 
conda: https://conda.anaconda.org/conda-forge/linux-64/libclang-cpp21.1-21.1.8-default_h99862b1_3.conda + sha256: de512ce246faec2d4f7766774769921a85b5aa053a74abd2f8c97ad50b393aac + md5: 24a2802074d26aecfdbc9b3f1d8168d1 + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=14 + - libllvm21 >=21.1.8,<21.2.0a0 + - libstdcxx >=14 + license: Apache-2.0 WITH LLVM-exception + license_family: Apache + purls: [] + size: 21066639 + timestamp: 1770190428756 +- conda: https://conda.anaconda.org/conda-forge/linux-64/libclang13-22.1.3-default_h746c552_1.conda + sha256: 7a86861402343f1cc0845b837986d677dd93cfe5006d4f02126aa13581d93b41 + md5: 80daec8cf93185515ac7b5d359e3f929 + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=14 + - libllvm22 >=22.1.3,<22.2.0a0 + - libstdcxx >=14 + license: Apache-2.0 WITH LLVM-exception + license_family: Apache + purls: [] + size: 12822694 + timestamp: 1776099888592 +- conda: https://conda.anaconda.org/conda-forge/linux-64/libcrc32c-1.1.2-h9c3ff4c_0.tar.bz2 + sha256: fd1d153962764433fe6233f34a72cdeed5dcf8a883a85769e8295ce940b5b0c5 + md5: c965a5aa0d5c1c37ffc62dff36e28400 + depends: + - libgcc-ng >=9.4.0 + - libstdcxx-ng >=9.4.0 + license: BSD-3-Clause + license_family: BSD + purls: [] + size: 20440 + timestamp: 1633683576494 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/libcrc32c-1.1.2-hbdafb3b_0.tar.bz2 + sha256: 58477b67cc719060b5b069ba57161e20ba69b8695d154a719cb4b60caf577929 + md5: 32bd82a6a625ea6ce090a81c3d34edeb + depends: + - libcxx >=11.1.0 + license: BSD-3-Clause + license_family: BSD + purls: [] + size: 18765 + timestamp: 1633683992603 +- conda: https://conda.anaconda.org/conda-forge/linux-64/libcups-2.3.3-hb8b1518_5.conda + sha256: cb83980c57e311783ee831832eb2c20ecb41e7dee6e86e8b70b8cef0e43eab55 + md5: d4a250da4737ee127fb1fa6452a9002e + depends: + - __glibc >=2.17,<3.0.a0 + - krb5 >=1.21.3,<1.22.0a0 + - libgcc >=13 + - libstdcxx >=13 + - libzlib >=1.3.1,<2.0a0 + license: Apache-2.0 + license_family: Apache + purls: [] + size: 
4523621 + timestamp: 1749905341688 +- conda: https://conda.anaconda.org/conda-forge/linux-64/libcurl-8.18.0-h4e3cde8_0.conda + sha256: 5454709d9fb6e9c3dd6423bc284fa7835a7823bfa8323f6e8786cdd555101fab + md5: 0a5563efed19ca4461cf927419b6eb73 + depends: + - __glibc >=2.17,<3.0.a0 + - krb5 >=1.21.3,<1.22.0a0 + - libgcc >=14 + - libnghttp2 >=1.67.0,<2.0a0 + - libssh2 >=1.11.1,<2.0a0 + - libzlib >=1.3.1,<2.0a0 + - openssl >=3.5.4,<4.0a0 + - zstd >=1.5.7,<1.6.0a0 + license: curl + license_family: MIT + purls: [] + size: 462942 + timestamp: 1767821743793 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/libcurl-8.19.0-hd5a2499_0.conda + sha256: c4d581b067fa60f9dc0e1c5f18b756760ff094a03139e6b206eb98d185ae2bb1 + md5: 9fc7771fc8104abed9119113160be15a + depends: + - __osx >=11.0 + - krb5 >=1.22.2,<1.23.0a0 + - libnghttp2 >=1.67.0,<2.0a0 + - libssh2 >=1.11.1,<2.0a0 + - libzlib >=1.3.1,<2.0a0 + - openssl >=3.5.5,<4.0a0 + - zstd >=1.5.7,<1.6.0a0 + license: curl + license_family: MIT + purls: [] + size: 399616 + timestamp: 1773219210246 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/libcxx-22.1.3-h55c6f16_0.conda + sha256: 34cc56c627b01928e49731bcfe92338e440ab6b5952feee8f1dd16570b8b8339 + md5: acbb3f547c4aae16b19e417db0c6e5ed + depends: + - __osx >=11.0 + license: Apache-2.0 WITH LLVM-exception + license_family: Apache + purls: [] + size: 570026 + timestamp: 1775565121045 +- conda: https://conda.anaconda.org/conda-forge/linux-64/libdeflate-1.23-h86f0d12_0.conda + sha256: 4db2f70a1441317d964e84c268e388110ad9cf75ca98994d1336d670e62e6f07 + md5: 27fe770decaf469a53f3e3a6d593067f + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=13 + license: MIT + license_family: MIT + purls: [] + size: 72783 + timestamp: 1745260463421 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/libdeflate-1.24-h5773f1b_0.conda + sha256: 417d52b19c679e1881cce3f01cad3a2d542098fa2d6df5485aac40f01aede4d1 + md5: 3baf58a5a87e7c2f4d243ce2f8f2fe5c + depends: + - __osx >=11.0 + license: MIT 
+ license_family: MIT + purls: [] + size: 54790 + timestamp: 1747040549847 +- conda: https://conda.anaconda.org/conda-forge/linux-64/libdrm-2.4.125-hb03c661_1.conda + sha256: c076a213bd3676cc1ef22eeff91588826273513ccc6040d9bea68bccdc849501 + md5: 9314bc5a1fe7d1044dc9dfd3ef400535 + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=14 + - libpciaccess >=0.18,<0.19.0a0 + license: MIT + license_family: MIT + purls: [] + size: 310785 + timestamp: 1757212153962 +- conda: https://conda.anaconda.org/conda-forge/linux-64/libedit-3.1.20250104-pl5321h7949ede_0.conda + sha256: d789471216e7aba3c184cd054ed61ce3f6dac6f87a50ec69291b9297f8c18724 + md5: c277e0a4d549b03ac1e9d6cbbe3d017b + depends: + - ncurses + - __glibc >=2.17,<3.0.a0 + - libgcc >=13 + - ncurses >=6.5,<7.0a0 + license: BSD-2-Clause + license_family: BSD + purls: [] + size: 134676 + timestamp: 1738479519902 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/libedit-3.1.20250104-pl5321hafb1f1b_0.conda + sha256: 66aa216a403de0bb0c1340a88d1a06adaff66bae2cfd196731aa24db9859d631 + md5: 44083d2d2c2025afca315c7a172eab2b + depends: + - ncurses + - __osx >=11.0 + - ncurses >=6.5,<7.0a0 + license: BSD-2-Clause + license_family: BSD + purls: [] + size: 107691 + timestamp: 1738479560845 +- conda: https://conda.anaconda.org/conda-forge/linux-64/libegl-1.7.0-ha4b6fd6_2.conda + sha256: 7fd5408d359d05a969133e47af580183fbf38e2235b562193d427bb9dad79723 + md5: c151d5eb730e9b7480e6d48c0fc44048 + depends: + - __glibc >=2.17,<3.0.a0 + - libglvnd 1.7.0 ha4b6fd6_2 + license: LicenseRef-libglvnd + purls: [] + size: 44840 + timestamp: 1731330973553 +- conda: https://conda.anaconda.org/conda-forge/linux-64/libev-4.33-hd590300_2.conda + sha256: 1cd6048169fa0395af74ed5d8f1716e22c19a81a8a36f934c110ca3ad4dd27b4 + md5: 172bf1cd1ff8629f2b1179945ed45055 + depends: + - libgcc-ng >=12 + license: BSD-2-Clause + license_family: BSD + purls: [] + size: 112766 + timestamp: 1702146165126 +- conda: 
https://conda.anaconda.org/conda-forge/osx-arm64/libev-4.33-h93a5062_2.conda + sha256: 95cecb3902fbe0399c3a7e67a5bed1db813e5ab0e22f4023a5e0f722f2cc214f + md5: 36d33e440c31857372a72137f78bacf5 + license: BSD-2-Clause + license_family: BSD + purls: [] + size: 107458 + timestamp: 1702146414478 +- conda: https://conda.anaconda.org/conda-forge/linux-64/libevent-2.1.12-hf998b51_1.conda + sha256: 2e14399d81fb348e9d231a82ca4d816bf855206923759b69ad006ba482764131 + md5: a1cfcc585f0c42bf8d5546bb1dfb668d + depends: + - libgcc-ng >=12 + - openssl >=3.1.1,<4.0a0 + license: BSD-3-Clause + license_family: BSD + purls: [] + size: 427426 + timestamp: 1685725977222 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/libevent-2.1.12-h2757513_1.conda + sha256: 8c136d7586259bb5c0d2b913aaadc5b9737787ae4f40e3ad1beaf96c80b919b7 + md5: 1a109764bff3bdc7bdd84088347d71dc + depends: + - openssl >=3.1.1,<4.0a0 + license: BSD-3-Clause + license_family: BSD + purls: [] + size: 368167 + timestamp: 1685726248899 +- conda: https://conda.anaconda.org/conda-forge/linux-64/libexpat-2.7.5-hecca717_0.conda + sha256: e8c2b57f6aacabdf2f1b0924bd4831ce5071ba080baa4a9e8c0d720588b6794c + md5: 49f570f3bc4c874a06ea69b7225753af + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=14 + constrains: + - expat 2.7.5.* + license: MIT + license_family: MIT + purls: [] + size: 76624 + timestamp: 1774719175983 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/libexpat-2.7.5-hf6b4638_0.conda + sha256: 06780dec91dd25770c8cf01e158e1062fbf7c576b1406427475ce69a8af75b7e + md5: a32123f93e168eaa4080d87b0fb5da8a + depends: + - __osx >=11.0 + constrains: + - expat 2.7.5.* + license: MIT + license_family: MIT + purls: [] + size: 68192 + timestamp: 1774719211725 +- conda: https://conda.anaconda.org/conda-forge/linux-64/libffi-3.5.2-h3435931_0.conda + sha256: 31f19b6a88ce40ebc0d5a992c131f57d919f73c0b92cd1617a5bec83f6e961e6 + md5: a360c33a5abe61c07959e449fa1453eb + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=14 + 
license: MIT + license_family: MIT + purls: [] + size: 58592 + timestamp: 1769456073053 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/libffi-3.5.2-hcf2aa1b_0.conda + sha256: 6686a26466a527585e6a75cc2a242bf4a3d97d6d6c86424a441677917f28bec7 + md5: 43c04d9cb46ef176bb2a4c77e324d599 + depends: + - __osx >=11.0 + license: MIT + license_family: MIT + purls: [] + size: 40979 + timestamp: 1769456747661 +- conda: https://conda.anaconda.org/conda-forge/linux-64/libfreetype-2.14.3-ha770c72_0.conda + sha256: 38f014a7129e644636e46064ecd6b1945e729c2140e21d75bb476af39e692db2 + md5: e289f3d17880e44b633ba911d57a321b + depends: + - libfreetype6 >=2.14.3 + license: GPL-2.0-only OR FTL + purls: [] + size: 8049 + timestamp: 1774298163029 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/libfreetype-2.14.3-hce30654_0.conda + sha256: a047a2f238362a37d484f9620e8cba29f513a933cd9eb68571ad4b270d6f8f3e + md5: f73b109d49568d5d1dda43bb147ae37f + depends: + - libfreetype6 >=2.14.3 + license: GPL-2.0-only OR FTL + purls: [] + size: 8091 + timestamp: 1774298691258 +- conda: https://conda.anaconda.org/conda-forge/linux-64/libfreetype6-2.14.3-h73754d4_0.conda + sha256: 16f020f96da79db1863fcdd8f2b8f4f7d52f177dd4c58601e38e9182e91adf1d + md5: fb16b4b69e3f1dcfe79d80db8fd0c55d + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=14 + - libpng >=1.6.55,<1.7.0a0 + - libzlib >=1.3.2,<2.0a0 + constrains: + - freetype >=2.14.3 + license: GPL-2.0-only OR FTL + purls: [] + size: 384575 + timestamp: 1774298162622 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/libfreetype6-2.14.3-hdfa99f5_0.conda + sha256: ff764608e1f2839e95e2cf9b243681475f8778c36af7a42b3f78f476fdbb1dd3 + md5: e98ba7b5f09a5f450eca083d5a1c4649 + depends: + - __osx >=11.0 + - libpng >=1.6.55,<1.7.0a0 + - libzlib >=1.3.2,<2.0a0 + constrains: + - freetype >=2.14.3 + license: GPL-2.0-only OR FTL + purls: [] + size: 338085 + timestamp: 1774298689297 +- conda: 
https://conda.anaconda.org/conda-forge/linux-64/libgcc-15.2.0-he0feb66_18.conda + sha256: faf7d2017b4d718951e3a59d081eb09759152f93038479b768e3d612688f83f5 + md5: 0aa00f03f9e39fb9876085dee11a85d4 + depends: + - __glibc >=2.17,<3.0.a0 + - _openmp_mutex >=4.5 + constrains: + - libgcc-ng ==15.2.0=*_18 + - libgomp 15.2.0 he0feb66_18 + license: GPL-3.0-only WITH GCC-exception-3.1 + license_family: GPL + purls: [] + size: 1041788 + timestamp: 1771378212382 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/libgcc-15.2.0-hcbb3090_18.conda + sha256: 1d9c4f35586adb71bcd23e31b68b7f3e4c4ab89914c26bed5f2859290be5560e + md5: 92df6107310b1fff92c4cc84f0de247b + depends: + - _openmp_mutex + constrains: + - libgcc-ng ==15.2.0=*_18 + - libgomp 15.2.0 18 + license: GPL-3.0-only WITH GCC-exception-3.1 + license_family: GPL + purls: [] + size: 401974 + timestamp: 1771378877463 +- conda: https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-15.2.0-h69a702a_18.conda + sha256: e318a711400f536c81123e753d4c797a821021fb38970cebfb3f454126016893 + md5: d5e96b1ed75ca01906b3d2469b4ce493 + depends: + - libgcc 15.2.0 he0feb66_18 + license: GPL-3.0-only WITH GCC-exception-3.1 + license_family: GPL + purls: [] + size: 27526 + timestamp: 1771378224552 +- conda: https://conda.anaconda.org/conda-forge/linux-64/libgfortran-15.2.0-h69a702a_18.conda + sha256: d2c9fad338fd85e4487424865da8e74006ab2e2475bd788f624d7a39b2a72aee + md5: 9063115da5bc35fdc3e1002e69b9ef6e + depends: + - libgfortran5 15.2.0 h68bc16d_18 + constrains: + - libgfortran-ng ==15.2.0=*_18 + license: GPL-3.0-only WITH GCC-exception-3.1 + license_family: GPL + purls: [] + size: 27523 + timestamp: 1771378269450 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/libgfortran-15.2.0-h07b0088_18.conda + sha256: 63f89087c3f0c8621c5c89ecceec1e56e5e1c84f65fc9c5feca33a07c570a836 + md5: 26981599908ed2205366e8fc91b37fc6 + depends: + - libgfortran5 15.2.0 hdae7583_18 + constrains: + - libgfortran-ng ==15.2.0=*_18 + license: 
GPL-3.0-only WITH GCC-exception-3.1 + license_family: GPL + purls: [] + size: 138973 + timestamp: 1771379054939 +- conda: https://conda.anaconda.org/conda-forge/linux-64/libgfortran5-15.2.0-h68bc16d_18.conda + sha256: 539b57cf50ec85509a94ba9949b7e30717839e4d694bc94f30d41c9d34de2d12 + md5: 646855f357199a12f02a87382d429b75 + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=15.2.0 + constrains: + - libgfortran 15.2.0 + license: GPL-3.0-only WITH GCC-exception-3.1 + license_family: GPL + purls: [] + size: 2482475 + timestamp: 1771378241063 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/libgfortran5-15.2.0-hdae7583_18.conda + sha256: 91033978ba25e6a60fb86843cf7e1f7dc8ad513f9689f991c9ddabfaf0361e7e + md5: c4a6f7989cffb0544bfd9207b6789971 + depends: + - libgcc >=15.2.0 + constrains: + - libgfortran 15.2.0 + license: GPL-3.0-only WITH GCC-exception-3.1 + license_family: GPL + purls: [] + size: 598634 + timestamp: 1771378886363 +- conda: https://conda.anaconda.org/conda-forge/linux-64/libgl-1.7.0-ha4b6fd6_2.conda + sha256: dc2752241fa3d9e40ce552c1942d0a4b5eeb93740c9723873f6fcf8d39ef8d2d + md5: 928b8be80851f5d8ffb016f9c81dae7a + depends: + - __glibc >=2.17,<3.0.a0 + - libglvnd 1.7.0 ha4b6fd6_2 + - libglx 1.7.0 ha4b6fd6_2 + license: LicenseRef-libglvnd + purls: [] + size: 134712 + timestamp: 1731330998354 +- conda: https://conda.anaconda.org/conda-forge/linux-64/libglib-2.86.2-h32235b2_0.conda + sha256: 918306d6ed211ab483e4e19368e5748b265d24e75c88a1c66a61f72b9fa30b29 + md5: 0cb0612bc9cb30c62baf41f9d600611b + depends: + - __glibc >=2.17,<3.0.a0 + - libffi >=3.5.2,<3.6.0a0 + - libgcc >=14 + - libiconv >=1.18,<2.0a0 + - libzlib >=1.3.1,<2.0a0 + - pcre2 >=10.46,<10.47.0a0 + constrains: + - glib 2.86.2 *_0 + license: LGPL-2.1-or-later + purls: [] + size: 3974801 + timestamp: 1763672326986 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/libglib-2.86.4-he378b5c_1.conda + sha256: a4254a241a96198e019ced2e0d2967e4c0ef64fac32077a45c065b32dc2b15d2 + md5: 
673069f6725ed7b1073f9b96094294d1 + depends: + - __osx >=11.0 + - libffi >=3.5.2,<3.6.0a0 + - libiconv >=1.18,<2.0a0 + - libintl >=0.25.1,<1.0a0 + - libzlib >=1.3.1,<2.0a0 + - pcre2 >=10.47,<10.48.0a0 + constrains: + - glib 2.86.4 *_1 + license: LGPL-2.1-or-later + purls: [] + size: 4108927 + timestamp: 1771864169970 +- conda: https://conda.anaconda.org/conda-forge/linux-64/libglvnd-1.7.0-ha4b6fd6_2.conda + sha256: 1175f8a7a0c68b7f81962699751bb6574e6f07db4c9f72825f978e3016f46850 + md5: 434ca7e50e40f4918ab701e3facd59a0 + depends: + - __glibc >=2.17,<3.0.a0 + license: LicenseRef-libglvnd + purls: [] + size: 132463 + timestamp: 1731330968309 +- conda: https://conda.anaconda.org/conda-forge/linux-64/libglx-1.7.0-ha4b6fd6_2.conda + sha256: 2d35a679624a93ce5b3e9dd301fff92343db609b79f0363e6d0ceb3a6478bfa7 + md5: c8013e438185f33b13814c5c488acd5c + depends: + - __glibc >=2.17,<3.0.a0 + - libglvnd 1.7.0 ha4b6fd6_2 + - xorg-libx11 >=1.8.10,<2.0a0 + license: LicenseRef-libglvnd + purls: [] + size: 75504 + timestamp: 1731330988898 +- conda: https://conda.anaconda.org/conda-forge/linux-64/libgomp-15.2.0-he0feb66_18.conda + sha256: 21337ab58e5e0649d869ab168d4e609b033509de22521de1bfed0c031bfc5110 + md5: 239c5e9546c38a1e884d69effcf4c882 + depends: + - __glibc >=2.17,<3.0.a0 + license: GPL-3.0-only WITH GCC-exception-3.1 + license_family: GPL + purls: [] + size: 603262 + timestamp: 1771378117851 +- conda: https://conda.anaconda.org/conda-forge/linux-64/libgoogle-cloud-2.39.0-hdb79228_0.conda + sha256: d3341cf69cb02c07bbd1837968f993da01b7bd467e816b1559a3ca26c1ff14c5 + md5: a2e30ccd49f753fd30de0d30b1569789 + depends: + - __glibc >=2.17,<3.0.a0 + - libabseil * cxx17* + - libabseil >=20250512.1,<20250513.0a0 + - libcurl >=8.14.1,<9.0a0 + - libgcc >=14 + - libgrpc >=1.73.1,<1.74.0a0 + - libprotobuf >=6.31.1,<6.31.2.0a0 + - libstdcxx >=14 + - openssl >=3.5.1,<4.0a0 + constrains: + - libgoogle-cloud 2.39.0 *_0 + license: Apache-2.0 + license_family: Apache + purls: [] + size: 1307909 + 
timestamp: 1752048413383 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/libgoogle-cloud-2.39.0-head0a95_0.conda + sha256: 209facdb8ea5b68163f146525720768fa3191cef86c82b2538e8c3cafa1e9dd4 + md5: ad7272a081abe0966d0297691154eda5 + depends: + - __osx >=11.0 + - libabseil * cxx17* + - libabseil >=20250512.1,<20250513.0a0 + - libcurl >=8.14.1,<9.0a0 + - libcxx >=19 + - libgrpc >=1.73.1,<1.74.0a0 + - libprotobuf >=6.31.1,<6.31.2.0a0 + - openssl >=3.5.1,<4.0a0 + constrains: + - libgoogle-cloud 2.39.0 *_0 + license: Apache-2.0 + license_family: Apache + purls: [] + size: 876283 + timestamp: 1752047598741 +- conda: https://conda.anaconda.org/conda-forge/linux-64/libgoogle-cloud-storage-2.39.0-hdbdcf42_0.conda + sha256: 59eb8365f0aee384f2f3b2a64dcd454f1a43093311aa5f21a8bb4bd3c79a6db8 + md5: bd21962ff8a9d1ce4720d42a35a4af40 + depends: + - __glibc >=2.17,<3.0.a0 + - libabseil + - libcrc32c >=1.1.2,<1.2.0a0 + - libcurl + - libgcc >=14 + - libgoogle-cloud 2.39.0 hdb79228_0 + - libstdcxx >=14 + - libzlib >=1.3.1,<2.0a0 + - openssl + license: Apache-2.0 + license_family: Apache + purls: [] + size: 804189 + timestamp: 1752048589800 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/libgoogle-cloud-storage-2.39.0-hfa3a374_0.conda + sha256: a5160c23b8b231b88d0ff738c7f52b0ee703c4c0517b044b18f4d176e729dfd8 + md5: 147a468b9b6c3ced1fccd69b864ae289 + depends: + - __osx >=11.0 + - libabseil + - libcrc32c >=1.1.2,<1.2.0a0 + - libcurl + - libcxx >=19 + - libgoogle-cloud 2.39.0 head0a95_0 + - libzlib >=1.3.1,<2.0a0 + - openssl + license: Apache-2.0 + license_family: Apache + purls: [] + size: 525153 + timestamp: 1752047915306 +- conda: https://conda.anaconda.org/conda-forge/linux-64/libgrpc-1.73.1-h3288cfb_1.conda + sha256: bc9d32af6167b1f5bcda216dc44eddcb27f3492440571ab12f6e577472a05e34 + md5: ff63bb12ac31c176ff257e3289f20770 + depends: + - __glibc >=2.17,<3.0.a0 + - c-ares >=1.34.5,<2.0a0 + - libabseil * cxx17* + - libabseil >=20250512.1,<20250513.0a0 + - libgcc >=14 + - 
libprotobuf >=6.31.1,<6.31.2.0a0 + - libre2-11 >=2025.8.12 + - libstdcxx >=14 + - libzlib >=1.3.1,<2.0a0 + - openssl >=3.5.4,<4.0a0 + - re2 + constrains: + - grpc-cpp =1.73.1 + license: Apache-2.0 + license_family: APACHE + purls: [] + size: 8349777 + timestamp: 1761058442526 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/libgrpc-1.73.1-h3063b79_1.conda + sha256: c2099872b1aa06bf8153e35e5b706d2000c1fc16f4dde2735ccd77a0643a4683 + md5: f5856b3b9dae4463348a7ec23c1301f2 + depends: + - __osx >=11.0 + - c-ares >=1.34.5,<2.0a0 + - libabseil * cxx17* + - libabseil >=20250512.1,<20250513.0a0 + - libcxx >=19 + - libprotobuf >=6.31.1,<6.31.2.0a0 + - libre2-11 >=2025.8.12 + - libzlib >=1.3.1,<2.0a0 + - openssl >=3.5.4,<4.0a0 + - re2 + constrains: + - grpc-cpp =1.73.1 + license: Apache-2.0 + license_family: APACHE + purls: [] + size: 5377798 + timestamp: 1761053602943 +- conda: https://conda.anaconda.org/conda-forge/linux-64/libhwy-1.3.0-h4c17acf_1.conda + sha256: 2bdd1cdd677b119abc5e83069bec2e28fe6bfb21ebaea3cd07acee67f38ea274 + md5: c2a0c1d0120520e979685034e0b79859 + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=14 + - libstdcxx >=14 + license: Apache-2.0 OR BSD-3-Clause + purls: [] + size: 1448617 + timestamp: 1758894401402 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/libhwy-1.3.0-h48b13b8_1.conda + sha256: 837fe775ba8ec9f08655bb924e28dba390d917423350333a75fd5eeac0776174 + md5: 6375717f5fcd756de929a06d0e40fab0 + depends: + - __osx >=11.0 + - libcxx >=19 + license: Apache-2.0 OR BSD-3-Clause + purls: [] + size: 581579 + timestamp: 1758894814983 +- conda: https://conda.anaconda.org/conda-forge/linux-64/libiconv-1.18-h3b78370_2.conda + sha256: c467851a7312765447155e071752d7bf9bf44d610a5687e32706f480aad2833f + md5: 915f5995e94f60e9a4826e0b0920ee88 + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=14 + license: LGPL-2.1-only + purls: [] + size: 790176 + timestamp: 1754908768807 +- conda: 
https://conda.anaconda.org/conda-forge/osx-arm64/libiconv-1.18-h23cfdf5_2.conda + sha256: de0336e800b2af9a40bdd694b03870ac4a848161b35c8a2325704f123f185f03 + md5: 4d5a7445f0b25b6a3ddbb56e790f5251 + depends: + - __osx >=11.0 + license: LGPL-2.1-only + purls: [] + size: 750379 + timestamp: 1754909073836 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/libintl-0.25.1-h493aca8_0.conda + sha256: 99d2cebcd8f84961b86784451b010f5f0a795ed1c08f1e7c76fbb3c22abf021a + md5: 5103f6a6b210a3912faf8d7db516918c + depends: + - __osx >=11.0 + - libiconv >=1.18,<2.0a0 + license: LGPL-2.1-or-later + purls: [] + size: 90957 + timestamp: 1751558394144 +- conda: https://conda.anaconda.org/conda-forge/linux-64/libjpeg-turbo-3.1.4.1-hb03c661_0.conda + sha256: 10056646c28115b174de81a44e23e3a0a3b95b5347d2e6c45cc6d49d35294256 + md5: 6178c6f2fb254558238ef4e6c56fb782 + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=14 + constrains: + - jpeg <0.0.0a + license: IJG AND BSD-3-Clause AND Zlib + purls: [] + size: 633831 + timestamp: 1775962768273 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/libjpeg-turbo-3.1.4.1-h84a0fba_0.conda + sha256: 17e035ae6a520ff6a6bb5dd93a4a7c3895891f4f9743bcb8c6ef607445a31cd0 + md5: b8a7544c83a67258b0e8592ec6a5d322 + depends: + - __osx >=11.0 + constrains: + - jpeg <0.0.0a + license: IJG AND BSD-3-Clause AND Zlib + purls: [] + size: 555681 + timestamp: 1775962975624 +- conda: https://conda.anaconda.org/conda-forge/linux-64/libjxl-0.11.1-h6cb5226_4.conda + sha256: b9d924d69fc84cd3c660a181985748d9c2df34cd7c7bb03b92d8f70efa7753d9 + md5: f2840d9c2afb19e303e126c9d3a04b36 + depends: + - __glibc >=2.17,<3.0.a0 + - libbrotlidec >=1.1.0,<1.2.0a0 + - libbrotlienc >=1.1.0,<1.2.0a0 + - libgcc >=14 + - libhwy >=1.3.0,<1.4.0a0 + - libstdcxx >=14 + license: BSD-3-Clause + license_family: BSD + purls: [] + size: 1740823 + timestamp: 1757583994233 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/libjxl-0.11.1-h7274d02_4.conda + sha256: 
74b3ded8f7f85c20b7fce0d28dfd462c49880f88458846c4f8b946d7ecb94076 + md5: 3c87b077b788e7844f0c8b866c5621ac + depends: + - __osx >=11.0 + - libbrotlidec >=1.1.0,<1.2.0a0 + - libbrotlienc >=1.1.0,<1.2.0a0 + - libcxx >=19 + - libhwy >=1.3.0,<1.4.0a0 + license: BSD-3-Clause + license_family: BSD + purls: [] + size: 918558 + timestamp: 1757584152666 +- conda: https://conda.anaconda.org/conda-forge/linux-64/liblapack-3.11.0-6_h47877c9_openblas.conda + build_number: 6 + sha256: 371f517eb7010b21c6cc882c7606daccebb943307cb9a3bf2c70456a5c024f7d + md5: 881d801569b201c2e753f03c84b85e15 + depends: + - libblas 3.11.0 6_h4a7cf45_openblas + constrains: + - blas 2.306 openblas + - liblapacke 3.11.0 6*_openblas + - libcblas 3.11.0 6*_openblas + license: BSD-3-Clause + license_family: BSD + purls: [] + size: 18624 + timestamp: 1774503065378 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/liblapack-3.11.0-6_hd9741b5_openblas.conda + build_number: 6 + sha256: 21606b7346810559e259807497b86f438950cf19e71838e44ebaf4bd2b35b549 + md5: ee33d2d05a7c5ea1f67653b37eb74db1 + depends: + - libblas 3.11.0 6_h51639a9_openblas + constrains: + - liblapacke 3.11.0 6*_openblas + - libcblas 3.11.0 6*_openblas + - blas 2.306 openblas + license: BSD-3-Clause + license_family: BSD + purls: [] + size: 18863 + timestamp: 1774504467905 +- conda: https://conda.anaconda.org/conda-forge/linux-64/libllvm21-21.1.8-hf7376ad_0.conda + sha256: 91bb4f5be1601b40b4995911d785e29387970f0b3c80f33f7f9028f95335399f + md5: 1a2708a460884d6861425b7f9a7bef99 + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=14 + - libstdcxx >=14 + - libxml2 + - libxml2-16 >=2.14.6 + - libzlib >=1.3.1,<2.0a0 + - zstd >=1.5.7,<1.6.0a0 + license: Apache-2.0 WITH LLVM-exception + license_family: Apache + purls: [] + size: 44333366 + timestamp: 1765959132513 +- conda: https://conda.anaconda.org/conda-forge/linux-64/libllvm22-22.1.3-hf7376ad_0.conda + sha256: ad732019e8dd963efb5a54b5ff49168f191246bc418c3033762b6e8cb64b530c + md5: 
aeb186f7165bf287495a267fa8ff4129 + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=14 + - libstdcxx >=14 + - libxml2 + - libxml2-16 >=2.14.6 + - libzlib >=1.3.2,<2.0a0 + - zstd >=1.5.7,<1.6.0a0 + license: Apache-2.0 WITH LLVM-exception + license_family: Apache + purls: [] + size: 44235531 + timestamp: 1775641389057 +- conda: https://conda.anaconda.org/conda-forge/linux-64/liblzma-5.8.3-hb03c661_0.conda + sha256: ec30e52a3c1bf7d0425380a189d209a52baa03f22fb66dd3eb587acaa765bd6d + md5: b88d90cad08e6bc8ad540cb310a761fb + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=14 + constrains: + - xz 5.8.3.* + license: 0BSD + purls: [] + size: 113478 + timestamp: 1775825492909 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/liblzma-5.8.3-h8088a28_0.conda + sha256: 34878d87275c298f1a732c6806349125cebbf340d24c6c23727268184bba051e + md5: b1fd823b5ae54fbec272cea0811bd8a9 + depends: + - __osx >=11.0 + constrains: + - xz 5.8.3.* + license: 0BSD + purls: [] + size: 92472 + timestamp: 1775825802659 +- conda: https://conda.anaconda.org/conda-forge/linux-64/libnghttp2-1.68.1-h877daf1_0.conda + sha256: 663444d77a42f2265f54fb8b48c5450bfff4388d9c0f8253dd7855f0d993153f + md5: 2a45e7f8af083626f009645a6481f12d + depends: + - __glibc >=2.17,<3.0.a0 + - c-ares >=1.34.6,<2.0a0 + - libev >=4.33,<4.34.0a0 + - libev >=4.33,<5.0a0 + - libgcc >=14 + - libstdcxx >=14 + - libzlib >=1.3.1,<2.0a0 + - openssl >=3.5.5,<4.0a0 + license: MIT + license_family: MIT + purls: [] + size: 663344 + timestamp: 1773854035739 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/libnghttp2-1.68.1-h8f3e76b_0.conda + sha256: 2bc7bc3978066f2c274ebcbf711850cc9ab92e023e433b9631958a098d11e10a + md5: 6ea18834adbc3b33df9bd9fb45eaf95b + depends: + - __osx >=11.0 + - c-ares >=1.34.6,<2.0a0 + - libcxx >=19 + - libev >=4.33,<4.34.0a0 + - libev >=4.33,<5.0a0 + - libzlib >=1.3.1,<2.0a0 + - openssl >=3.5.5,<4.0a0 + license: MIT + license_family: MIT + purls: [] + size: 576526 + timestamp: 1773854624224 +- conda: 
https://conda.anaconda.org/conda-forge/linux-64/libnsl-2.0.1-hb9d3cd8_1.conda + sha256: 927fe72b054277cde6cb82597d0fcf6baf127dcbce2e0a9d8925a68f1265eef5 + md5: d864d34357c3b65a4b731f78c0801dc4 + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=13 + license: LGPL-2.1-only + license_family: GPL + purls: [] + size: 33731 + timestamp: 1750274110928 +- conda: https://conda.anaconda.org/conda-forge/linux-64/libntlm-1.8-hb9d3cd8_0.conda + sha256: 3b3f19ced060013c2dd99d9d46403be6d319d4601814c772a3472fe2955612b0 + md5: 7c7927b404672409d9917d49bff5f2d6 + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=13 + license: LGPL-2.1-or-later + purls: [] + size: 33418 + timestamp: 1734670021371 +- conda: https://conda.anaconda.org/conda-forge/linux-64/libopenblas-0.3.32-pthreads_h94d23a6_0.conda + sha256: 6dc30b28f32737a1c52dada10c8f3a41bc9e021854215efca04a7f00487d09d9 + md5: 89d61bc91d3f39fda0ca10fcd3c68594 + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=14 + - libgfortran + - libgfortran5 >=14.3.0 + constrains: + - openblas >=0.3.32,<0.3.33.0a0 + license: BSD-3-Clause + license_family: BSD + purls: [] + size: 5928890 + timestamp: 1774471724897 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/libopenblas-0.3.32-openmp_he657e61_0.conda + sha256: 713e453bde3531c22a660577e59bf91ef578dcdfd5edb1253a399fa23514949a + md5: 3a1111a4b6626abebe8b978bb5a323bf + depends: + - __osx >=11.0 + - libgfortran + - libgfortran5 >=14.3.0 + - llvm-openmp >=19.1.7 + constrains: + - openblas >=0.3.32,<0.3.33.0a0 + license: BSD-3-Clause + license_family: BSD + purls: [] + size: 4308797 + timestamp: 1774472508546 +- conda: https://conda.anaconda.org/conda-forge/linux-64/libopengl-1.7.0-ha4b6fd6_2.conda + sha256: 215086c108d80349e96051ad14131b751d17af3ed2cb5a34edd62fa89bfe8ead + md5: 7df50d44d4a14d6c31a2c54f2cd92157 + depends: + - __glibc >=2.17,<3.0.a0 + - libglvnd 1.7.0 ha4b6fd6_2 + license: LicenseRef-libglvnd + purls: [] + size: 50757 + timestamp: 1731330993524 +- conda: 
https://conda.anaconda.org/conda-forge/linux-64/libopentelemetry-cpp-1.21.0-hb9b0907_1.conda + sha256: ba9b09066f9abae9b4c98ffedef444bbbf4c068a094f6c77d70ef6f006574563 + md5: 1c0320794855f457dea27d35c4c71e23 + depends: + - libabseil * cxx17* + - libabseil >=20250512.1,<20250513.0a0 + - libcurl >=8.14.1,<9.0a0 + - libgrpc >=1.73.1,<1.74.0a0 + - libopentelemetry-cpp-headers 1.21.0 ha770c72_1 + - libprotobuf >=6.31.1,<6.31.2.0a0 + - libzlib >=1.3.1,<2.0a0 + - nlohmann_json + - prometheus-cpp >=1.3.0,<1.4.0a0 + constrains: + - cpp-opentelemetry-sdk =1.21.0 + license: Apache-2.0 + license_family: APACHE + purls: [] + size: 885397 + timestamp: 1751782709380 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/libopentelemetry-cpp-1.21.0-he15edb5_1.conda + sha256: 4bf8f703ddd140fe54d4c8464ac96b28520fbc1083cce52c136a85a854745d5c + md5: cbcea547d6d831863ab0a4e164099062 + depends: + - libabseil * cxx17* + - libabseil >=20250512.1,<20250513.0a0 + - libcurl >=8.14.1,<9.0a0 + - libgrpc >=1.73.1,<1.74.0a0 + - libopentelemetry-cpp-headers 1.21.0 hce30654_1 + - libprotobuf >=6.31.1,<6.31.2.0a0 + - libzlib >=1.3.1,<2.0a0 + - nlohmann_json + - prometheus-cpp >=1.3.0,<1.4.0a0 + constrains: + - cpp-opentelemetry-sdk =1.21.0 + license: Apache-2.0 + license_family: APACHE + purls: [] + size: 564609 + timestamp: 1751782939921 +- conda: https://conda.anaconda.org/conda-forge/linux-64/libopentelemetry-cpp-headers-1.21.0-ha770c72_1.conda + sha256: b3a1b36d5f92fbbfd7b6426982a99561bdbd7e4adbafca1b7f127c9a5ab0a60f + md5: 9e298d76f543deb06eb0f3413675e13a + license: Apache-2.0 + license_family: APACHE + purls: [] + size: 363444 + timestamp: 1751782679053 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/libopentelemetry-cpp-headers-1.21.0-hce30654_1.conda + sha256: ce74278453dec1e3c11158ec368c8f1b03862e279b63f79ed01f38567a1174e6 + md5: c7df4b2d612208f3a27486c113b6aefc + license: Apache-2.0 + license_family: APACHE + purls: [] + size: 363213 + timestamp: 1751782889359 +- conda: 
https://conda.anaconda.org/conda-forge/linux-64/libparquet-22.0.0-h7376487_1_cpu.conda + build_number: 1 + sha256: 7160c77ee93f7458cf71353cb3792d8b3892cb0a0f3641cf6a816dbaef4edd04 + md5: 30aebbbe04583804d0916b4a22278c12 + depends: + - __glibc >=2.17,<3.0.a0 + - libarrow 22.0.0 h91d8edf_1_cpu + - libgcc >=14 + - libstdcxx >=14 + - libthrift >=0.22.0,<0.22.1.0a0 + - openssl >=3.5.4,<4.0a0 + license: Apache-2.0 + license_family: APACHE + purls: [] + size: 1347441 + timestamp: 1761731824392 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/libparquet-22.0.0-h0ac143b_1_cpu.conda + build_number: 1 + sha256: 14f9cd032d697146d0818208fece3a6180cfb0db9683acc2a4f61a30550ce678 + md5: 3cd7a188ac2f6e3e81168a3752cc86e0 + depends: + - __osx >=11.0 + - libabseil * cxx17* + - libabseil >=20250512.1,<20250513.0a0 + - libarrow 22.0.0 h7239961_1_cpu + - libcxx >=19 + - libopentelemetry-cpp >=1.21.0,<1.22.0a0 + - libprotobuf >=6.31.1,<6.31.2.0a0 + - libthrift >=0.22.0,<0.22.1.0a0 + - openssl >=3.5.4,<4.0a0 + license: Apache-2.0 + license_family: APACHE + purls: [] + size: 1041032 + timestamp: 1761731118872 +- conda: https://conda.anaconda.org/conda-forge/linux-64/libpciaccess-0.18-hb9d3cd8_0.conda + sha256: 0bd91de9b447a2991e666f284ae8c722ffb1d84acb594dbd0c031bd656fa32b2 + md5: 70e3400cbbfa03e96dcde7fc13e38c7b + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=13 + license: MIT + license_family: MIT + purls: [] + size: 28424 + timestamp: 1749901812541 +- conda: https://conda.anaconda.org/conda-forge/linux-64/libpng-1.6.57-h421ea60_0.conda + sha256: 06323fb0a831440f0b72a53013182e1d4bb219e3ea958bb37af98b25dc0cf518 + md5: 06f225e6d8c549ad6c0201679828a882 + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=14 + - libzlib >=1.3.2,<2.0a0 + license: zlib-acknowledgement + purls: [] + size: 317779 + timestamp: 1775692841709 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/libpng-1.6.57-h132b30e_0.conda + sha256: 3f2b76a220844a7b2217688910d59c5fce075f54d0cee03da55a344e6be8f8a0 
+ md5: 1a28041d8d998688fd82e25b45582b21 + depends: + - __osx >=11.0 + - libzlib >=1.3.2,<2.0a0 + license: zlib-acknowledgement + purls: [] + size: 289615 + timestamp: 1775692978357 +- conda: https://conda.anaconda.org/conda-forge/linux-64/libpq-17.7-h5c52fec_1.conda + sha256: 06a8ace6cc5ee47b85a5e64fad621e5912a12a0202398f54f302eb4e8b9db1fd + md5: a4769024afeab4b32ac8167c2f92c7ac + depends: + - __glibc >=2.17,<3.0.a0 + - icu >=75.1,<76.0a0 + - krb5 >=1.21.3,<1.22.0a0 + - libgcc >=14 + - openldap >=2.6.10,<2.7.0a0 + - openssl >=3.5.4,<4.0a0 + license: PostgreSQL + purls: [] + size: 2649881 + timestamp: 1763565297202 +- conda: https://conda.anaconda.org/conda-forge/linux-64/libprotobuf-6.31.1-h49aed37_4.conda + sha256: 0ef142ac31e6fd59b4af89ac800acb6deb3fbd9cc4ccf070c03cc2c784dc7296 + md5: 07479fc04ba3ddd5d9f760ef1635cfa7 + depends: + - __glibc >=2.17,<3.0.a0 + - libabseil * cxx17* + - libabseil >=20250512.1,<20250513.0a0 + - libgcc >=14 + - libstdcxx >=14 + - libzlib >=1.3.1,<2.0a0 + license: BSD-3-Clause + license_family: BSD + purls: [] + size: 4372578 + timestamp: 1766316228461 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/libprotobuf-6.31.1-h98f38fd_4.conda + sha256: 505d62fb2a487aff594a30f6c419f8e861fb3a47e25e407dae2779ac4a585b18 + md5: 8a6b4281c176f1695ae0015f420e6aa9 + depends: + - __osx >=11.0 + - libabseil * cxx17* + - libabseil >=20250512.1,<20250513.0a0 + - libcxx >=19 + - libzlib >=1.3.1,<2.0a0 + license: BSD-3-Clause + license_family: BSD + purls: [] + size: 3131502 + timestamp: 1766315339805 +- conda: https://conda.anaconda.org/conda-forge/linux-64/libre2-11-2025.11.05-h7b12aa8_0.conda + sha256: eb5d5ef4d12cdf744e0f728b35bca910843c8cf1249f758cf15488ca04a21dbb + md5: a30848ebf39327ea078cf26d114cff53 + depends: + - __glibc >=2.17,<3.0.a0 + - libabseil * cxx17* + - libabseil >=20250512.1,<20250513.0a0 + - libgcc >=14 + - libstdcxx >=14 + constrains: + - re2 2025.11.05.* + license: BSD-3-Clause + license_family: BSD + purls: [] + size: 211099 + 
timestamp: 1762397758105 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/libre2-11-2025.11.05-h91c62da_0.conda + sha256: 7b525313ab16415c4a3191ccf59157c3a4520ed762c8ec61fcfb81d27daa4723 + md5: 060f099756e6baf2ed51b9065e44eda8 + depends: + - __osx >=11.0 + - libabseil * cxx17* + - libabseil >=20250512.1,<20250513.0a0 + - libcxx >=19 + constrains: + - re2 2025.11.05.* + license: BSD-3-Clause + license_family: BSD + purls: [] + size: 165593 + timestamp: 1762398300610 +- conda: https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.53.0-h0c1763c_0.conda + sha256: d1688f91c013f9be0ad46492d4ec976ccc1dff5705a0b42be957abb73bf853bf + md5: 393c8b31bd128e3d979e7ec17e9507c6 + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=14 + - libzlib >=1.3.2,<2.0a0 + license: blessing + purls: [] + size: 954044 + timestamp: 1775753743691 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/libsqlite-3.53.0-h1b79a29_0.conda + sha256: 1a9d1e3e18dbb0b87cff3b40c3e42703730d7ac7ee9b9322c2682196a81ba0c3 + md5: 8423c008105df35485e184066cad4566 + depends: + - __osx >=11.0 + - libzlib >=1.3.2,<2.0a0 + license: blessing + purls: [] + size: 920039 + timestamp: 1775754485962 +- conda: https://conda.anaconda.org/conda-forge/linux-64/libssh2-1.11.1-hcf80075_0.conda + sha256: fa39bfd69228a13e553bd24601332b7cfeb30ca11a3ca50bb028108fe90a7661 + md5: eecce068c7e4eddeb169591baac20ac4 + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=13 + - libzlib >=1.3.1,<2.0a0 + - openssl >=3.5.0,<4.0a0 + license: BSD-3-Clause + license_family: BSD + purls: [] + size: 304790 + timestamp: 1745608545575 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/libssh2-1.11.1-h1590b86_0.conda + sha256: 8bfe837221390ffc6f111ecca24fa12d4a6325da0c8d131333d63d6c37f27e0a + md5: b68e8f66b94b44aaa8de4583d3d4cc40 + depends: + - libzlib >=1.3.1,<2.0a0 + - openssl >=3.5.0,<4.0a0 + license: BSD-3-Clause + license_family: BSD + purls: [] + size: 279193 + timestamp: 1745608793272 +- conda: 
https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-15.2.0-h934c35e_18.conda + sha256: 78668020064fdaa27e9ab65cd2997e2c837b564ab26ce3bf0e58a2ce1a525c6e + md5: 1b08cd684f34175e4514474793d44bcb + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc 15.2.0 he0feb66_18 + constrains: + - libstdcxx-ng ==15.2.0=*_18 + license: GPL-3.0-only WITH GCC-exception-3.1 + license_family: GPL + purls: [] + size: 5852330 + timestamp: 1771378262446 +- conda: https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-15.2.0-hdf11a46_18.conda + sha256: 3c902ffd673cb3c6ddde624cdb80f870b6c835f8bf28384b0016e7d444dd0145 + md5: 6235adb93d064ecdf3d44faee6f468de + depends: + - libstdcxx 15.2.0 h934c35e_18 + license: GPL-3.0-only WITH GCC-exception-3.1 + license_family: GPL + purls: [] + size: 27575 + timestamp: 1771378314494 +- conda: https://conda.anaconda.org/conda-forge/linux-64/libthrift-0.22.0-h454ac66_1.conda + sha256: 4888b9ea2593c36ca587a5ebe38d0a56a0e6d6a9e4bb7da7d9a326aaaca7c336 + md5: 8ed82d90e6b1686f5e98f8b7825a15ef + depends: + - __glibc >=2.17,<3.0.a0 + - libevent >=2.1.12,<2.1.13.0a0 + - libgcc >=14 + - libstdcxx >=14 + - libzlib >=1.3.1,<2.0a0 + - openssl >=3.5.1,<4.0a0 + license: Apache-2.0 + license_family: APACHE + purls: [] + size: 424208 + timestamp: 1753277183984 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/libthrift-0.22.0-h14a376c_1.conda + sha256: 8b703f2c6e47ed5886d7298601b9416b59e823fc8d1a8fa867192c94c5911aac + md5: 3161023bb2f8c152e4c9aa59bdd40975 + depends: + - __osx >=11.0 + - libcxx >=19 + - libevent >=2.1.12,<2.1.13.0a0 + - libzlib >=1.3.1,<2.0a0 + - openssl >=3.5.1,<4.0a0 + license: Apache-2.0 + license_family: APACHE + purls: [] + size: 323360 + timestamp: 1753277264380 +- conda: https://conda.anaconda.org/conda-forge/linux-64/libtiff-4.7.0-hd9ff511_4.conda + sha256: 7480613af15795281bd338a4d3d2ca148f9c2ecafc967b9cc233e78ba2fe4a6d + md5: 6c1028898cf3a2032d9af46689e1b81a + depends: + - __glibc >=2.17,<3.0.a0 + - lerc >=4.0.0,<5.0a0 + - 
libdeflate >=1.23,<1.24.0a0 + - libgcc >=13 + - libjpeg-turbo >=3.1.0,<4.0a0 + - liblzma >=5.8.1,<6.0a0 + - libstdcxx >=13 + - libwebp-base >=1.5.0,<2.0a0 + - libzlib >=1.3.1,<2.0a0 + - zstd >=1.5.7,<1.6.0a0 + license: HPND + purls: [] + size: 429381 + timestamp: 1745372713285 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/libtiff-4.7.1-h7dc4979_0.conda + sha256: 6bc1b601f0d3ee853acd23884a007ac0a0290f3609dabb05a47fc5a0295e2b53 + md5: 2bb9e04e2da869125e2dc334d665f00d + depends: + - __osx >=11.0 + - lerc >=4.0.0,<5.0a0 + - libcxx >=19 + - libdeflate >=1.24,<1.25.0a0 + - libjpeg-turbo >=3.1.0,<4.0a0 + - liblzma >=5.8.1,<6.0a0 + - libwebp-base >=1.6.0,<2.0a0 + - libzlib >=1.3.1,<2.0a0 + - zstd >=1.5.7,<1.6.0a0 + license: HPND + purls: [] + size: 373640 + timestamp: 1758278641520 +- conda: https://conda.anaconda.org/conda-forge/linux-64/libutf8proc-2.11.3-hfe17d71_0.conda + sha256: ecbf4b7520296ed580498dc66a72508b8a79da5126e1d6dc650a7087171288f9 + md5: 1247168fe4a0b8912e3336bccdbf98a5 + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=14 + license: MIT + license_family: MIT + purls: [] + size: 85969 + timestamp: 1768735071295 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/libutf8proc-2.11.3-h2431656_0.conda + sha256: ae1a82e62cd4e3c18e005ae7ff4358ed72b2bfbfe990d5a6a5587f81e9a100dc + md5: 2255add2f6ae77d0a96624a5cbde6d45 + depends: + - __osx >=11.0 + license: MIT + license_family: MIT + purls: [] + size: 87916 + timestamp: 1768735311947 +- conda: https://conda.anaconda.org/conda-forge/linux-64/libuuid-2.42-h5347b49_0.conda + sha256: bc1b08c92626c91500fd9f26f2c797f3eb153b627d53e9c13cd167f1e12b2829 + md5: 38ffe67b78c9d4de527be8315e5ada2c + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=14 + license: BSD-3-Clause + license_family: BSD + purls: [] + size: 40297 + timestamp: 1775052476770 +- conda: https://conda.anaconda.org/conda-forge/linux-64/libuv-1.51.0-hb03c661_1.conda + sha256: c180f4124a889ac343fc59d15558e93667d894a966ec6fdb61da1604481be26b + 
md5: 0f03292cc56bf91a077a134ea8747118 + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=14 + license: MIT + license_family: MIT + purls: [] + size: 895108 + timestamp: 1753948278280 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/libuv-1.51.0-h6caf38d_1.conda + sha256: 042c7488ad97a5629ec0a991a8b2a3345599401ecc75ad6a5af73b60e6db9689 + md5: c0d87c3c8e075daf1daf6c31b53e8083 + depends: + - __osx >=11.0 + license: MIT + license_family: MIT + purls: [] + size: 421195 + timestamp: 1753948426421 +- conda: https://conda.anaconda.org/conda-forge/linux-64/libvulkan-loader-1.4.341.0-h5279c79_0.conda + sha256: a68280d57dfd29e3d53400409a39d67c4b9515097eba733aa6fe00c880620e2b + md5: 31ad065eda3c2d88f8215b1289df9c89 + depends: + - __glibc >=2.17,<3.0.a0 + - libstdcxx >=14 + - libgcc >=14 + - xorg-libx11 >=1.8.12,<2.0a0 + - xorg-libxrandr >=1.5.5,<2.0a0 + constrains: + - libvulkan-headers 1.4.341.0.* + license: Apache-2.0 + license_family: APACHE + purls: [] + size: 199795 + timestamp: 1770077125520 +- conda: https://conda.anaconda.org/conda-forge/linux-64/libwebp-base-1.6.0-hd42ef1d_0.conda + sha256: 3aed21ab28eddffdaf7f804f49be7a7d701e8f0e46c856d801270b470820a37b + md5: aea31d2e5b1091feca96fcfe945c3cf9 + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=14 + constrains: + - libwebp 1.6.0 + license: BSD-3-Clause + license_family: BSD + purls: [] + size: 429011 + timestamp: 1752159441324 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/libwebp-base-1.6.0-h07db88b_0.conda + sha256: a4de3f371bb7ada325e1f27a4ef7bcc81b2b6a330e46fac9c2f78ac0755ea3dd + md5: e5e7d467f80da752be17796b87fe6385 + depends: + - __osx >=11.0 + constrains: + - libwebp 1.6.0 + license: BSD-3-Clause + license_family: BSD + purls: [] + size: 294974 + timestamp: 1752159906788 +- conda: https://conda.anaconda.org/conda-forge/linux-64/libxcb-1.17.0-h8a09558_0.conda + sha256: 666c0c431b23c6cec6e492840b176dde533d48b7e6fb8883f5071223433776aa + md5: 92ed62436b625154323d40d5f2f11dd7 + depends: + - 
__glibc >=2.17,<3.0.a0 + - libgcc >=13 + - pthread-stubs + - xorg-libxau >=1.0.11,<2.0a0 + - xorg-libxdmcp + license: MIT + license_family: MIT + purls: [] + size: 395888 + timestamp: 1727278577118 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/libxcb-1.17.0-hdb1d25a_0.conda + sha256: bd3816218924b1e43b275863e21a3e13a5db4a6da74cca8e60bc3c213eb62f71 + md5: af523aae2eca6dfa1c8eec693f5b9a79 + depends: + - __osx >=11.0 + - pthread-stubs + - xorg-libxau >=1.0.11,<2.0a0 + - xorg-libxdmcp + license: MIT + license_family: MIT + purls: [] + size: 323658 + timestamp: 1727278733917 +- conda: https://conda.anaconda.org/conda-forge/linux-64/libxcrypt-4.4.36-hd590300_1.conda + sha256: 6ae68e0b86423ef188196fff6207ed0c8195dd84273cb5623b85aa08033a410c + md5: 5aa797f8787fe7a17d1b0821485b5adc + depends: + - libgcc-ng >=12 + license: LGPL-2.1-or-later + purls: [] + size: 100393 + timestamp: 1702724383534 +- conda: https://conda.anaconda.org/conda-forge/linux-64/libxkbcommon-1.13.1-hca5e8e5_0.conda + sha256: d2195b5fbcb0af1ff7b345efdf89290c279b8d1d74f325ae0ac98148c375863c + md5: 2bca1fbb221d9c3c8e3a155784bbc2e9 + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=14 + - libstdcxx >=14 + - libxcb >=1.17.0,<2.0a0 + - libxml2 + - libxml2-16 >=2.14.6 + - xkeyboard-config + - xorg-libxau >=1.0.12,<2.0a0 + license: MIT/X11 Derivative + license_family: MIT + purls: [] + size: 837922 + timestamp: 1764794163823 +- conda: https://conda.anaconda.org/conda-forge/linux-64/libxml2-2.15.1-h26afc86_0.conda + sha256: ec0735ae56c3549149eebd7dc22c0bed91fd50c02eaa77ff418613ddda190aa8 + md5: e512be7dc1f84966d50959e900ca121f + depends: + - __glibc >=2.17,<3.0.a0 + - icu >=75.1,<76.0a0 + - libgcc >=14 + - libiconv >=1.18,<2.0a0 + - liblzma >=5.8.1,<6.0a0 + - libxml2-16 2.15.1 ha9997c6_0 + - libzlib >=1.3.1,<2.0a0 + license: MIT + license_family: MIT + purls: [] + size: 45283 + timestamp: 1761015644057 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/libxml2-2.15.2-h8d039ee_0.conda + 
sha256: 99cb32dd06a2e58c12981b71a84b052293f27b5ab042e3f21d895f5d7ee13eff + md5: e476ba84e57f2bd2004a27381812ad4e + depends: + - __osx >=11.0 + - icu >=78.2,<79.0a0 + - libiconv >=1.18,<2.0a0 + - liblzma >=5.8.2,<6.0a0 + - libxml2-16 2.15.2 h5ef1a60_0 + - libzlib >=1.3.1,<2.0a0 + license: MIT + license_family: MIT + purls: [] + size: 41206 + timestamp: 1772704982288 +- conda: https://conda.anaconda.org/conda-forge/linux-64/libxml2-16-2.15.1-ha9997c6_0.conda + sha256: 71436e72a286ef8b57d6f4287626ff91991eb03c7bdbe835280521791efd1434 + md5: e7733bc6785ec009e47a224a71917e84 + depends: + - __glibc >=2.17,<3.0.a0 + - icu >=75.1,<76.0a0 + - libgcc >=14 + - libiconv >=1.18,<2.0a0 + - liblzma >=5.8.1,<6.0a0 + - libzlib >=1.3.1,<2.0a0 + constrains: + - libxml2 2.15.1 + license: MIT + license_family: MIT + purls: [] + size: 556302 + timestamp: 1761015637262 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/libxml2-16-2.15.2-h5ef1a60_0.conda + sha256: 6432259204e78c8a8a815afae987fbf60bd722605fe2c4b022e65196b17d4537 + md5: b284e2b02d53ef7981613839fb86beee + depends: + - __osx >=11.0 + - icu >=78.2,<79.0a0 + - libiconv >=1.18,<2.0a0 + - liblzma >=5.8.2,<6.0a0 + - libzlib >=1.3.1,<2.0a0 + constrains: + - libxml2 2.15.2 + license: MIT + license_family: MIT + purls: [] + size: 466220 + timestamp: 1772704950232 +- conda: https://conda.anaconda.org/conda-forge/linux-64/libxslt-1.1.43-h711ed8c_1.conda + sha256: 0694760a3e62bdc659d90a14ae9c6e132b525a7900e59785b18a08bb52a5d7e5 + md5: 87e6096ec6d542d1c1f8b33245fe8300 + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=14 + - libxml2 + - libxml2-16 >=2.14.6 + license: MIT + license_family: MIT + purls: [] + size: 245434 + timestamp: 1757963724977 +- conda: https://conda.anaconda.org/conda-forge/linux-64/libzlib-1.3.2-h25fd6f3_2.conda + sha256: 55044c403570f0dc26e6364de4dc5368e5f3fc7ff103e867c487e2b5ab2bcda9 + md5: d87ff7921124eccd67248aa483c23fec + depends: + - __glibc >=2.17,<3.0.a0 + constrains: + - zlib 1.3.2 *_2 + license: Zlib 
+ license_family: Other + purls: [] + size: 63629 + timestamp: 1774072609062 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/libzlib-1.3.2-h8088a28_2.conda + sha256: 361415a698514b19a852f5d1123c5da746d4642139904156ddfca7c922d23a05 + md5: bc5a5721b6439f2f62a84f2548136082 + depends: + - __osx >=11.0 + constrains: + - zlib 1.3.2 *_2 + license: Zlib + license_family: Other + purls: [] + size: 47759 + timestamp: 1774072956767 +- conda: https://conda.anaconda.org/conda-forge/linux-64/libzopfli-1.0.3-h9c3ff4c_0.tar.bz2 + sha256: ff94f30b2e86cbad6296cf3e5804d442d9e881f7ba8080d92170981662528c6e + md5: c66fe2d123249af7651ebde8984c51c2 + depends: + - libgcc-ng >=9.3.0 + - libstdcxx-ng >=9.3.0 + license: Apache-2.0 + license_family: Apache + purls: [] + size: 168074 + timestamp: 1607309189989 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/libzopfli-1.0.3-h9f76cd9_0.tar.bz2 + sha256: e3003b8efe551902dc60b21c81d7164b291b26b7862704421368d26ba5c10fa0 + md5: a0758d74f57741aa0d9ede13fd592e56 + depends: + - libcxx >=11.0.0 + license: Apache-2.0 + license_family: Apache + purls: [] + size: 147901 + timestamp: 1607309166373 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/llvm-openmp-22.1.3-hc7d1edf_0.conda + sha256: 71dcf9a9df103f57a0d5b0abc2594a15c2dd3afe52f07ac2d1c471552a61fb8d + md5: 086b00b77f5f0f7ef5c2a99855650df4 + depends: + - __osx >=11.0 + constrains: + - openmp 22.1.3|22.1.3.* + - intel-openmp <0.0a0 + license: Apache-2.0 WITH LLVM-exception + license_family: APACHE + purls: [] + size: 285886 + timestamp: 1775712563398 +- conda: https://conda.anaconda.org/conda-forge/linux-64/llvmlite-0.47.0-py310hee1c697_1.conda + sha256: 956f480aebb3975de2d29e42467c1de4766a71c59d5ea20ddd505a3d87807b0d + md5: b00665c4d41d65d6ead9225cd1f6b296 + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=14 + - libstdcxx >=14 + - libzlib >=1.3.2,<2.0a0 + - python >=3.10,<3.11.0a0 + - python_abi 3.10.* *_cp310 + - zstd >=1.5.7,<1.6.0a0 + license: BSD-2-Clause + purls: + - 
pkg:pypi/llvmlite?source=compressed-mapping + size: 34054660 + timestamp: 1776076746219 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/llvmlite-0.47.0-py310h4137262_1.conda + sha256: 5e76fdfd9712dbf55f07febc6a25ee23d4c24aeb46866d57a3fdd59e8b4fb8f7 + md5: 18bb9275a870e9b90eec6b82d30be702 + depends: + - __osx >=11.0 + - libcxx >=19 + - libzlib >=1.3.2,<2.0a0 + - python >=3.10,<3.11.0a0 + - python >=3.10,<3.11.0a0 *_cpython + - python_abi 3.10.* *_cp310 + - zstd >=1.5.7,<1.6.0a0 + license: BSD-2-Clause + purls: + - pkg:pypi/llvmlite?source=hash-mapping + size: 24249986 + timestamp: 1776077524325 +- conda: https://conda.anaconda.org/conda-forge/noarch/lmfit-1.3.4-pyhd8ed1ab_0.conda + sha256: f1b5a1aa7ea6e528967b111e187c6d8b00219c53ecb0b6d6842cd16c688eeea3 + md5: f8cdc37d08f88f8cd64f1252ecb6a7a9 + depends: + - asteval >=1.0.0 + - dill >=0.3.4 + - numpy >=1.19 + - pip + - python >=3.9 + - scipy >=1.6 + - setuptools + - uncertainties >=3.2.1 + license: BSD-3-Clause + license_family: BSD + purls: + - pkg:pypi/lmfit?source=hash-mapping + size: 86583 + timestamp: 1753035921043 +- conda: https://conda.anaconda.org/conda-forge/noarch/locket-1.0.0-pyhd8ed1ab_0.tar.bz2 + sha256: 9afe0b5cfa418e8bdb30d8917c5a6cec10372b037924916f1f85b9f4899a67a6 + md5: 91e27ef3d05cc772ce627e51cff111c4 + depends: + - python >=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.* + license: BSD-2-Clause + license_family: BSD + purls: + - pkg:pypi/locket?source=hash-mapping + size: 8250 + timestamp: 1650660473123 +- pypi: https://files.pythonhosted.org/packages/c6/b9/93d71026bf6c4dfe3afc32064a3fcd533d9032c8b97499744a999f97c230/lxml-6.0.4-cp310-cp310-macosx_10_9_universal2.whl + name: lxml + version: 6.0.4 + sha256: 4a2c26422c359e93d97afd29f18670ae2079dbe2dd17469f1e181aa6699e96a7 + requires_dist: + - cssselect>=0.7 ; extra == 'cssselect' + - html5lib ; extra == 'html5' + - beautifulsoup4 ; extra == 'htmlsoup' + - lxml-html-clean ; extra == 'html-clean' + requires_python: '>=3.8' +- pypi: 
https://files.pythonhosted.org/packages/ef/4a/ac0f195f52fae450338cae90234588a2ead2337440b4e5ff7230775477a3/lxml-6.0.4-cp310-cp310-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl + name: lxml + version: 6.0.4 + sha256: 2ad61a5fb291e45bb1d680b4de0c99e28547bd249ec57d60e3e59ebe6628a01f + requires_dist: + - cssselect>=0.7 ; extra == 'cssselect' + - html5lib ; extra == 'html5' + - beautifulsoup4 ; extra == 'htmlsoup' + - lxml-html-clean ; extra == 'html-clean' + requires_python: '>=3.8' +- conda: https://conda.anaconda.org/conda-forge/linux-64/lz4-4.4.5-py310hde1b0b5_1.conda + sha256: 5dc79d66e7c85867c537ad13d276742314b3e6b87ab8b22ce7e1aac61ce6281e + md5: 4a20c97489a720287cf3d082f1e715c6 + depends: + - python + - lz4-c + - libgcc >=14 + - __glibc >=2.17,<3.0.a0 + - lz4-c >=1.10.0,<1.11.0a0 + - python_abi 3.10.* *_cp310 + license: BSD-3-Clause + license_family: BSD + purls: + - pkg:pypi/lz4?source=hash-mapping + size: 41963 + timestamp: 1765026389535 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/lz4-4.4.5-py310h36fcd3f_1.conda + sha256: 7dea991f16c021023281ff3822279c723e321b60b0f9971318b6ad8a0c9545b2 + md5: 35f27356a57639a51c78d5c1e6b290f7 + depends: + - python + - lz4-c + - python 3.10.* *_cpython + - __osx >=11.0 + - python_abi 3.10.* *_cp310 + - lz4-c >=1.10.0,<1.11.0a0 + license: BSD-3-Clause + license_family: BSD + purls: + - pkg:pypi/lz4?source=hash-mapping + size: 123555 + timestamp: 1765026468864 +- conda: https://conda.anaconda.org/conda-forge/linux-64/lz4-c-1.10.0-h5888daf_1.conda + sha256: 47326f811392a5fd3055f0f773036c392d26fdb32e4d8e7a8197eed951489346 + md5: 9de5350a85c4a20c685259b889aa6393 + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=13 + - libstdcxx >=13 + license: BSD-2-Clause + license_family: BSD + purls: [] + size: 167055 + timestamp: 1733741040117 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/lz4-c-1.10.0-h286801f_1.conda + sha256: 94d3e2a485dab8bdfdd4837880bde3dd0d701e2b97d6134b8806b7c8e69c8652 + md5: 
01511afc6cc1909c5303cf31be17b44f + depends: + - __osx >=11.0 + - libcxx >=18 + license: BSD-2-Clause + license_family: BSD + purls: [] + size: 148824 + timestamp: 1733741047892 +- conda: https://conda.anaconda.org/conda-forge/noarch/mako-1.3.10-pyhcf101f3_1.conda + sha256: 6099a13faaaf22afa8daa273929f393d41140fc03509b4ef1e2f6858b511699d + md5: 99f74609a309e434f25f0ede5f50580c + depends: + - python >=3.10 + - importlib-metadata + - markupsafe >=0.9.2 + - python + license: MIT + license_family: MIT + purls: + - pkg:pypi/mako?source=hash-mapping + size: 71947 + timestamp: 1764678340587 +- conda: https://conda.anaconda.org/conda-forge/noarch/markdown-it-py-4.0.0-pyhd8ed1ab_0.conda + sha256: 7b1da4b5c40385791dbc3cc85ceea9fad5da680a27d5d3cb8bfaa185e304a89e + md5: 5b5203189eb668f042ac2b0826244964 + depends: + - mdurl >=0.1,<1 + - python >=3.10 + license: MIT + license_family: MIT + purls: + - pkg:pypi/markdown-it-py?source=hash-mapping + size: 64736 + timestamp: 1754951288511 +- conda: https://conda.anaconda.org/conda-forge/linux-64/markupsafe-3.0.3-py310h3406613_1.conda + sha256: 9f3c34f8a7a8dcfed64221a2e19bbe0094ab2c6df7c029b7df713e52c9c9f229 + md5: 671afe636d2a97759804723f5afc22e0 + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=14 + - python >=3.10,<3.11.0a0 + - python_abi 3.10.* *_cp310 + constrains: + - jinja2 >=3.0.0 + license: BSD-3-Clause + license_family: BSD + purls: + - pkg:pypi/markupsafe?source=hash-mapping + size: 23899 + timestamp: 1772445369460 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/markupsafe-3.0.3-py310hb46c203_1.conda + sha256: c1a7cf542e15d5bcd1efbae5a60a75223f36f4870cc96c19ab05fcde642b0394 + md5: 4d372362aa5dd174b9300828ac29f806 + depends: + - __osx >=11.0 + - python >=3.10,<3.11.0a0 + - python >=3.10,<3.11.0a0 *_cpython + - python_abi 3.10.* *_cp310 + constrains: + - jinja2 >=3.0.0 + license: BSD-3-Clause + license_family: BSD + purls: + - pkg:pypi/markupsafe?source=compressed-mapping + size: 23871 + timestamp: 1772445652936 +- 
conda: https://conda.anaconda.org/conda-forge/linux-64/matplotlib-3.10.8-py310hff52083_0.conda + sha256: 6d087ae3f42e5a53f648a874629b561e8ec34416f6a258837ca0af405550defe + md5: e78bcae4f58d0000f756c3b42da20f13 + depends: + - matplotlib-base >=3.10.8,<3.10.9.0a0 + - pyside6 >=6.7.2 + - python >=3.10,<3.11.0a0 + - python_abi 3.10.* *_cp310 + - tornado >=5 + license: PSF-2.0 + license_family: PSF + purls: [] + size: 17450 + timestamp: 1763055406857 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/matplotlib-3.10.8-py310hb6292c7_0.conda + sha256: e19c56288ea1b729089d28614200e8613cdf5107367636736d0d0bf06c750e72 + md5: 33dca3f48bc5d4427238f4f214574f40 + depends: + - matplotlib-base >=3.10.8,<3.10.9.0a0 + - python >=3.10,<3.11.0a0 + - python_abi 3.10.* *_cp310 + - tornado >=5 + license: PSF-2.0 + license_family: PSF + purls: [] + size: 17596 + timestamp: 1763055909786 +- conda: https://conda.anaconda.org/conda-forge/linux-64/matplotlib-base-3.10.8-py310hfde16b3_0.conda + sha256: 809eaf93eb1901764c9b75803794c0359dd09366f578a13fdbbbe99824920d2c + md5: 093b60a14d2c0d8c10f17e14a73a60d3 + depends: + - __glibc >=2.17,<3.0.a0 + - contourpy >=1.0.1 + - cycler >=0.10 + - fonttools >=4.22.0 + - freetype + - kiwisolver >=1.3.1 + - libfreetype >=2.14.1 + - libfreetype6 >=2.14.1 + - libgcc >=14 + - libstdcxx >=14 + - numpy >=1.21,<3 + - numpy >=1.23 + - packaging >=20.0 + - pillow >=8 + - pyparsing >=2.3.1 + - python >=3.10,<3.11.0a0 + - python-dateutil >=2.7 + - python_abi 3.10.* *_cp310 + - qhull >=2020.2,<2020.3.0a0 + - tk >=8.6.13,<8.7.0a0 + license: PSF-2.0 + license_family: PSF + purls: + - pkg:pypi/matplotlib?source=hash-mapping + size: 7273307 + timestamp: 1763055380888 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/matplotlib-base-3.10.8-py310h0181960_0.conda + sha256: 397a75557d684e4030fcbee1a2adc6669036dd0525d64d6e5c060a5dff7ba027 + md5: f7be8dab2ed23302da20d4c96345eb15 + depends: + - __osx >=11.0 + - contourpy >=1.0.1 + - cycler >=0.10 + - fonttools 
>=4.22.0 + - freetype + - kiwisolver >=1.3.1 + - libcxx >=19 + - libfreetype >=2.14.1 + - libfreetype6 >=2.14.1 + - numpy >=1.21,<3 + - numpy >=1.23 + - packaging >=20.0 + - pillow >=8 + - pyparsing >=2.3.1 + - python >=3.10,<3.11.0a0 + - python >=3.10,<3.11.0a0 *_cpython + - python-dateutil >=2.7 + - python_abi 3.10.* *_cp310 + - qhull >=2020.2,<2020.3.0a0 + license: PSF-2.0 + license_family: PSF + purls: + - pkg:pypi/matplotlib?source=hash-mapping + size: 7123730 + timestamp: 1763055863073 +- conda: https://conda.anaconda.org/conda-forge/noarch/matplotlib-inline-0.2.1-pyhd8ed1ab_0.conda + sha256: 9d690334de0cd1d22c51bc28420663f4277cfa60d34fa5cad1ce284a13f1d603 + md5: 00e120ce3e40bad7bfc78861ce3c4a25 + depends: + - python >=3.10 + - traitlets + license: BSD-3-Clause + license_family: BSD + purls: + - pkg:pypi/matplotlib-inline?source=hash-mapping + size: 15175 + timestamp: 1761214578417 +- conda: https://conda.anaconda.org/conda-forge/noarch/mccabe-0.7.0-pyhd8ed1ab_1.conda + sha256: 9b0037171dad0100f0296699a11ae7d355237b55f42f9094aebc0f41512d96a1 + md5: 827064ddfe0de2917fb29f1da4f8f533 + depends: + - python >=3.9 + license: MIT + license_family: MIT + purls: + - pkg:pypi/mccabe?source=hash-mapping + size: 12934 + timestamp: 1733216573915 +- conda: https://conda.anaconda.org/conda-forge/noarch/mdurl-0.1.2-pyhd8ed1ab_1.conda + sha256: 78c1bbe1723449c52b7a9df1af2ee5f005209f67e40b6e1d3c7619127c43b1c7 + md5: 592132998493b3ff25fd7479396e8351 + depends: + - python >=3.9 + license: MIT + license_family: MIT + purls: + - pkg:pypi/mdurl?source=hash-mapping + size: 14465 + timestamp: 1733255681319 +- conda: https://conda.anaconda.org/conda-forge/noarch/minio-7.2.20-pyhd8ed1ab_0.conda + sha256: 132cd2ac509a15cb41a1f9c55f190c2c6ab278a8ee4915b178920c3606beb9af + md5: 3244fc3d4bc0be3ea995df133a4d9436 + depends: + - argon2-cffi + - certifi + - pycryptodome + - python >=3.10 + - typing_extensions + - urllib3 + license: Apache-2.0 + license_family: Apache + purls: + - 
pkg:pypi/minio?source=hash-mapping + size: 69324 + timestamp: 1764207690145 +- pypi: git+https://github.com/ChrisBeaumont/mpl-modest-image?rev=master#4174514a9ce7f4160fb6cbd200df6897694e0ac3 + name: modestimage + version: '0.2' +- conda: https://conda.anaconda.org/conda-forge/noarch/mongomock-4.3.0-pyhd8ed1ab_0.conda + sha256: 047e58ce472555586386fc3b2121ea95ec25d9f27b570a7adb9ccf8cefcb5796 + md5: e3fc737aa291e3966b1ee004c2f81cbb + depends: + - packaging + - python >=3.9 + - pytz + - sentinels + license: BSD-3-Clause + license_family: BSD + purls: + - pkg:pypi/mongomock?source=hash-mapping + size: 59687 + timestamp: 1742727718589 +- conda: https://conda.anaconda.org/conda-forge/noarch/mongoquery-1.4.3-pyhd8ed1ab_0.conda + sha256: 8e5fc466a715ef261c44d5965c0bd26507cc99747b0296635b753a7ab998b407 + md5: 404f751bd276eb97293319b9bdd80e38 + depends: + - python >=3.10 + - six + license: Unlicense + purls: + - pkg:pypi/mongoquery?source=hash-mapping + size: 12594 + timestamp: 1758059611138 +- conda: https://conda.anaconda.org/conda-forge/noarch/more-itertools-11.0.2-pyhcf101f3_0.conda + sha256: 74f7b461e0f0e0709a0c8abb018de9ad885258b74790ffda1e750ac5ddde0a85 + md5: b874955758a30a37c78b82ea5cf78fdb + depends: + - python >=3.10 + - python + license: MIT + license_family: MIT + purls: + - pkg:pypi/more-itertools?source=compressed-mapping + size: 71254 + timestamp: 1775762492525 +- conda: https://conda.anaconda.org/conda-forge/linux-64/mpc-1.4.0-he0a73b1_0.conda + sha256: c1fdeebc9f8e4f51df265efca4ea20c7a13911193cc255db73cccb6e422ae486 + md5: 770d00bf57b5599c4544d61b61d8c6c6 + depends: + - __glibc >=2.17,<3.0.a0 + - gmp >=6.3.0,<7.0a0 + - libgcc >=14 + - mpfr >=4.2.2,<5.0a0 + license: LGPL-3.0-or-later + license_family: LGPL + purls: [] + size: 100245 + timestamp: 1774472435333 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/mpc-1.4.0-h169892a_0.conda + sha256: a9774664adea222e4165efddcd902641c03c7d08fda3a83a5b0885e675ead309 + md5: 2845c3a1d0d8da1db92aba8323892475 + 
depends: + - __osx >=11.0 + - gmp >=6.3.0,<7.0a0 + - mpfr >=4.2.2,<5.0a0 + license: LGPL-3.0-or-later + license_family: LGPL + purls: [] + size: 86181 + timestamp: 1774472395307 +- conda: https://conda.anaconda.org/conda-forge/linux-64/mpfr-4.2.2-he0a73b1_0.conda + sha256: 8690f550a780f75d9c47f7ffc15f5ff1c149d36ac17208e50eda101ca16611b9 + md5: 85ce2ffa51ab21da5efa4a9edc5946aa + depends: + - __glibc >=2.17,<3.0.a0 + - gmp >=6.3.0,<7.0a0 + - libgcc >=14 + license: LGPL-3.0-only + license_family: LGPL + purls: [] + size: 730422 + timestamp: 1773413915171 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/mpfr-4.2.2-h6bc93b0_0.conda + sha256: af5eca85f7ffdd403275e916f1de40a7d4b48ae138f12479523d9500c6a073ba + md5: a47a14da2103c9c7a390f7c8bc8d7f9b + depends: + - __osx >=11.0 + - gmp >=6.3.0,<7.0a0 + license: LGPL-3.0-only + license_family: LGPL + purls: [] + size: 348767 + timestamp: 1773414111071 +- conda: https://conda.anaconda.org/conda-forge/noarch/mpl-scatter-density-0.8-pyhd8ed1ab_1.conda + sha256: b841728ddbee6a82677efefa9ddd704236d0c1f9c4440527ba11e0a8294b4939 + md5: 15f7a27f590c079a0aed4a8f1cee0dac + depends: + - fast-histogram >=0.3 + - matplotlib-base >=3.0 + - numpy + - python >=3.9 + license: BSD-2-Clause + license_family: BSD + purls: + - pkg:pypi/mpl-scatter-density?source=hash-mapping + size: 743291 + timestamp: 1745590251486 +- conda: https://conda.anaconda.org/conda-forge/linux-64/msgpack-python-1.1.2-py310h03d9f68_1.conda + sha256: 61cf3572d6afa3fa711c5f970a832783d2c281facb7b3b946a6b71a0bac2c592 + md5: 5eea9d8f8fcf49751dab7927cb0dfc3f + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=14 + - libstdcxx >=14 + - python >=3.10,<3.11.0a0 + - python_abi 3.10.* *_cp310 + license: Apache-2.0 + license_family: Apache + purls: + - pkg:pypi/msgpack?source=hash-mapping + size: 95105 + timestamp: 1762504073388 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/msgpack-python-1.1.2-py310h0e897d2_1.conda + sha256: 
0fa5e8ebf78e3a27f5e2646b021b2b0988746372dfcd95dd50c2840cef9a0118 + md5: 03c0ac9f01348d35e889dab9b9bb01fb + depends: + - __osx >=11.0 + - libcxx >=19 + - python >=3.10,<3.11.0a0 + - python >=3.10,<3.11.0a0 *_cpython + - python_abi 3.10.* *_cp310 + license: Apache-2.0 + license_family: Apache + purls: + - pkg:pypi/msgpack?source=hash-mapping + size: 83632 + timestamp: 1762504396042 +- conda: https://conda.anaconda.org/conda-forge/noarch/munkres-1.1.4-pyhd8ed1ab_1.conda + sha256: d09c47c2cf456de5c09fa66d2c3c5035aa1fa228a1983a433c47b876aa16ce90 + md5: 37293a85a0f4f77bbd9cf7aaefc62609 + depends: + - python >=3.9 + license: Apache-2.0 + license_family: Apache + purls: + - pkg:pypi/munkres?source=hash-mapping + size: 15851 + timestamp: 1749895533014 +- conda: https://conda.anaconda.org/conda-forge/noarch/mypy_extensions-1.1.0-pyha770c72_0.conda + sha256: 6ed158e4e5dd8f6a10ad9e525631e35cee8557718f83de7a4e3966b1f772c4b1 + md5: e9c622e0d00fa24a6292279af3ab6d06 + depends: + - python >=3.9 + license: MIT + license_family: MIT + purls: + - pkg:pypi/mypy-extensions?source=hash-mapping + size: 11766 + timestamp: 1745776666688 +- conda: https://conda.anaconda.org/conda-forge/noarch/narwhals-2.19.0-pyhcf101f3_0.conda + sha256: cac1f5236e9d7d1d90d733254bb26948b7c1b22cfbaffc6ebad3ebe9435f26b1 + md5: b94cbc2227cdca1e9a65d7ad4ee636c1 + depends: + - python >=3.10 + - python + license: MIT + license_family: MIT + purls: + - pkg:pypi/narwhals?source=hash-mapping + size: 281869 + timestamp: 1775500139138 +- conda: https://conda.anaconda.org/conda-forge/noarch/nbformat-5.10.4-pyhd8ed1ab_1.conda + sha256: 7a5bd30a2e7ddd7b85031a5e2e14f290898098dc85bea5b3a5bf147c25122838 + md5: bbe1963f1e47f594070ffe87cdf612ea + depends: + - jsonschema >=2.6 + - jupyter_core >=4.12,!=5.0.* + - python >=3.9 + - python-fastjsonschema >=2.15 + - traitlets >=5.1 + license: BSD-3-Clause + license_family: BSD + purls: + - pkg:pypi/nbformat?source=hash-mapping + size: 100945 + timestamp: 1733402844974 +- conda: 
https://conda.anaconda.org/conda-forge/noarch/nbstripout-0.9.1-pyhd8ed1ab_0.conda + sha256: e5029a1ca06d5f02c9366ec8686f64c942d77f297bfa90a0a8215083d85a7a01 + md5: 948b10290b9f4ebbb26de6da5c6b7c51 + depends: + - nbformat + - python >=3.10 + license: MIT + license_family: MIT + purls: + - pkg:pypi/nbstripout?source=hash-mapping + size: 24331 + timestamp: 1771778886946 +- conda: https://conda.anaconda.org/conda-forge/linux-64/ncurses-6.5-h2d0b736_3.conda + sha256: 3fde293232fa3fca98635e1167de6b7c7fda83caf24b9d6c91ec9eefb4f4d586 + md5: 47e340acb35de30501a76c7c799c41d7 + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=13 + license: X11 AND BSD-3-Clause + purls: [] + size: 891641 + timestamp: 1738195959188 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/ncurses-6.5-h5e97a16_3.conda + sha256: 2827ada40e8d9ca69a153a45f7fd14f32b2ead7045d3bbb5d10964898fe65733 + md5: 068d497125e4bf8a66bf707254fff5ae + depends: + - __osx >=11.0 + license: X11 AND BSD-3-Clause + purls: [] + size: 797030 + timestamp: 1738196177597 +- conda: https://conda.anaconda.org/conda-forge/linux-64/ndindex-1.10.1-py310hea6c23e_0.conda + sha256: 8135d2e31c080389cf2f48797efdfcf3f22e5ac1ea6ce8f940c5b30c48a6c581 + md5: d18ac4997faad0a11f39fddff74596f8 + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=14 + - libstdcxx >=14 + - python >=3.10,<3.11.0a0 + - python_abi 3.10.* *_cp310 + license: MIT + license_family: MIT + purls: + - pkg:pypi/ndindex?source=hash-mapping + size: 198032 + timestamp: 1763658034538 +- conda: https://conda.anaconda.org/conda-forge/noarch/ndindex-1.8-pyhd8ed1ab_1.conda + sha256: 9d1d9a97dffc08878582644ac36a58a58ab6fd3007829632422eecbf6c9c69ae + md5: 34cd9589f4124ff0016f100dc044180a + depends: + - python >=3.8 + license: MIT + license_family: MIT + purls: + - pkg:pypi/ndindex?source=hash-mapping + size: 70642 + timestamp: 1717010437985 +- conda: https://conda.anaconda.org/conda-forge/noarch/networkx-3.4.2-pyh267e887_2.conda + sha256: 
39625cd0c9747fa5c46a9a90683b8997d8b9649881b3dc88336b13b7bdd60117 + md5: fd40bf7f7f4bc4b647dc8512053d9873 + depends: + - python >=3.10 + - python + constrains: + - numpy >=1.24 + - scipy >=1.10,!=1.11.0,!=1.11.1 + - matplotlib >=3.7 + - pandas >=2.0 + license: BSD-3-Clause + license_family: BSD + purls: + - pkg:pypi/networkx?source=hash-mapping + size: 1265008 + timestamp: 1731521053408 +- conda: https://conda.anaconda.org/conda-forge/linux-64/nh3-0.3.4-py310h6de7dc8_0.conda + noarch: python + sha256: 772ec92277b60a19ccebdd2c8ea8875086a8fe7742e1b3db233c5a6b04367fa0 + md5: 8f94b4cd2c1a0ad78d34c076112d269f + depends: + - python + - libgcc >=14 + - __glibc >=2.17,<3.0.a0 + - _python_abi3_support 1.* + - cpython >=3.10 + constrains: + - __glibc >=2.17 + license: MIT + license_family: MIT + purls: + - pkg:pypi/nh3?source=hash-mapping + size: 677091 + timestamp: 1774451928026 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/nh3-0.3.4-py310hf32026f_0.conda + noarch: python + sha256: b4f36f1731eb0ebdc2821186264227f941d8a29eb6152c6c940a9d8d21d32102 + md5: 4af114f4ad269552fb0f745a1de16283 + depends: + - python + - __osx >=11.0 + - _python_abi3_support 1.* + - cpython >=3.10 + constrains: + - __osx >=11.0 + license: MIT + license_family: MIT + purls: + - pkg:pypi/nh3?source=hash-mapping + size: 633131 + timestamp: 1774452148745 +- conda: https://conda.anaconda.org/conda-forge/linux-64/nlohmann_json-3.12.0-h54a6638_1.conda + sha256: fd2cbd8dfc006c72f45843672664a8e4b99b2f8137654eaae8c3d46dca776f63 + md5: 16c2a0e9c4a166e53632cfca4f68d020 + constrains: + - nlohmann_json-abi ==3.12.0 + license: MIT + license_family: MIT + purls: [] + size: 136216 + timestamp: 1758194284857 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/nlohmann_json-3.12.0-h784d473_1.conda + sha256: 1945fd5b64b74ef3d57926156fb0bfe88ee637c49f3273067f7231b224f1d26d + md5: 755cfa6c08ed7b7acbee20ccbf15a47c + constrains: + - nlohmann_json-abi ==3.12.0 + license: MIT + license_family: MIT + purls: 
[] + size: 137595 + timestamp: 1768670878127 +- conda: https://conda.anaconda.org/conda-forge/noarch/nodeenv-1.10.0-pyhd8ed1ab_0.conda + sha256: 4fa40e3e13fc6ea0a93f67dfc76c96190afd7ea4ffc1bac2612d954b42cdc3ee + md5: eb52d14a901e23c39e9e7b4a1a5c015f + depends: + - python >=3.10 + - setuptools + license: BSD-3-Clause + license_family: BSD + purls: + - pkg:pypi/nodeenv?source=hash-mapping + size: 40866 + timestamp: 1766261270149 +- conda: https://conda.anaconda.org/conda-forge/noarch/nomkl-1.0-h5ca1d4c_0.tar.bz2 + sha256: d38542a151a90417065c1a234866f97fd1ea82a81de75ecb725955ab78f88b4b + md5: 9a66894dfd07c4510beb6b3f9672ccc0 + constrains: + - mkl <0.a0 + license: BSD-3-Clause + license_family: BSD + purls: [] + size: 3843 + timestamp: 1582593857545 +- conda: https://conda.anaconda.org/conda-forge/linux-64/numba-0.65.0-py310h225f558_1.conda + sha256: 5298cc4bcfe5a61076c70539b70182597495387a866d67fec728b4c79bbb8723 + md5: 3fbd555beb8093338cbc6a95e0530457 + depends: + - __glibc >=2.17,<3.0.a0 + - _openmp_mutex >=4.5 + - libgcc >=14 + - libstdcxx >=14 + - llvmlite >=0.47.0,<0.48.0a0 + - numpy >=1.21,<3 + - numpy >=1.22.3,<2.5 + - python >=3.10,<3.11.0a0 + - python_abi 3.10.* *_cp310 + constrains: + - libopenblas !=0.3.6 + - tbb >=2021.6.0 + - cudatoolkit >=11.2 + - scipy >=1.0 + - cuda-python >=11.6 + - cuda-version >=11.2 + license: BSD-2-Clause + license_family: BSD + purls: + - pkg:pypi/numba?source=hash-mapping + size: 4394490 + timestamp: 1776161990499 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/numba-0.65.0-py310h71bca05_1.conda + sha256: 129b22a2a0a72d7e00c8353fbacbc5e9945f5089fca84e363cd2458932d467de + md5: a5eced5c1390cdca4600ccd3e0bcddf7 + depends: + - __osx >=11.0 + - libcxx >=19 + - llvm-openmp >=19.1.7 + - llvm-openmp >=22.1.3 + - llvmlite >=0.47.0,<0.48.0a0 + - numpy >=1.21,<3 + - numpy >=1.22.3,<2.5 + - python >=3.10,<3.11.0a0 + - python >=3.10,<3.11.0a0 *_cpython + - python_abi 3.10.* *_cp310 + constrains: + - cudatoolkit >=11.2 + - scipy 
>=1.0 + - tbb >=2021.6.0 + - libopenblas >=0.3.18,!=0.3.20 + - cuda-python >=11.6 + - cuda-version >=11.2 + license: BSD-2-Clause + license_family: BSD + purls: + - pkg:pypi/numba?source=hash-mapping + size: 4372579 + timestamp: 1776162536687 +- conda: https://conda.anaconda.org/conda-forge/linux-64/numcodecs-0.13.1-py310h5eaa309_0.conda + sha256: 70cb0fa431ba9e75ef36d94f35324089dfa7da8f967e9c758f60e08aaf29b732 + md5: a3e9933fc59e8bcd2aa20753fb56db42 + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=13 + - libstdcxx >=13 + - msgpack-python + - numpy >=1.19,<3 + - numpy >=1.7 + - python >=3.10,<3.11.0a0 + - python_abi 3.10.* *_cp310 + license: MIT + license_family: MIT + purls: + - pkg:pypi/numcodecs?source=hash-mapping + size: 802894 + timestamp: 1728547783947 +- pypi: https://files.pythonhosted.org/packages/f3/89/6b07977baf2af75fb6692f9e7a1fb612a15f600fc921f3f565366de01f4a/numexpr-2.14.1-cp310-cp310-macosx_11_0_arm64.whl + name: numexpr + version: 2.14.1 + sha256: 64ae5dfd62d74a3ef82fe0b37f80527247f3626171ad82025900f46ffca4b39a + requires_dist: + - numpy>=1.23.0 + requires_python: '>=3.10' +- conda: https://conda.anaconda.org/conda-forge/linux-64/numexpr-2.14.1-py310h34a7263_101.conda + sha256: 39476b57c4c286e194c7f817fa26df214e54df04bfbc1da6b0177d058f8e1bb2 + md5: cb02b04ff05ba415b55995bbcc82358f + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=14 + - libstdcxx >=14 + - nomkl + - numpy >=1.21,<3 + - numpy >=1.23.0 + - python >=3.10,<3.11.0a0 + - python_abi 3.10.* *_cp310 + license: MIT + license_family: MIT + purls: + - pkg:pypi/numexpr?source=hash-mapping + size: 191474 + timestamp: 1762594963166 +- conda: https://conda.anaconda.org/conda-forge/linux-64/numpy-2.2.6-py310hefbff90_0.conda + sha256: 0ba94a61f91d67413e60fa8daa85627a8f299b5054b0eff8f93d26da83ec755e + md5: b0cea2c364bf65cd19e023040eeab05d + depends: + - __glibc >=2.17,<3.0.a0 + - libblas >=3.9.0,<4.0a0 + - libcblas >=3.9.0,<4.0a0 + - libgcc >=13 + - liblapack >=3.9.0,<4.0a0 + - libstdcxx >=13 + - 
python >=3.10,<3.11.0a0 + - python_abi 3.10.* *_cp310 + constrains: + - numpy-base <0a0 + license: BSD-3-Clause + license_family: BSD + purls: + - pkg:pypi/numpy?source=hash-mapping + size: 7893263 + timestamp: 1747545075833 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/numpy-2.2.6-py310h4d83441_0.conda + sha256: 87704bcd5f4a4f88eaf2a97f07e9825803b58a8003a209b91e89669317523faf + md5: f4bd8ac423d04b3c444b96f2463d3519 + depends: + - __osx >=11.0 + - libblas >=3.9.0,<4.0a0 + - libcblas >=3.9.0,<4.0a0 + - libcxx >=18 + - liblapack >=3.9.0,<4.0a0 + - python >=3.10,<3.11.0a0 + - python >=3.10,<3.11.0a0 *_cpython + - python_abi 3.10.* *_cp310 + constrains: + - numpy-base <0a0 + license: BSD-3-Clause + license_family: BSD + purls: + - pkg:pypi/numpy?source=hash-mapping + size: 5841650 + timestamp: 1747545043441 +- conda: https://conda.anaconda.org/conda-forge/noarch/numpydoc-1.10.0-pyhcf101f3_0.conda + sha256: 482d94fce136c4352b18c6397b9faf0a3149bfb12499ab1ffebad8db0cb6678f + md5: 3aa4b625f20f55cf68e92df5e5bf3c39 + depends: + - python >=3.10 + - sphinx >=6 + - tomli >=1.1.0 + - python + license: BSD-3-Clause + license_family: BSD + purls: + - pkg:pypi/numpydoc?source=hash-mapping + size: 65801 + timestamp: 1764715638266 +- conda: https://conda.anaconda.org/conda-forge/linux-64/obstore-0.9.2-py310hdfeec95_0.conda + sha256: efb7c64faa63c73bde9f6ab4613396c4157db5f19beefa38f17d5330325b8a9f + md5: b5f063b08f69ddb597223bf1bafb97bb + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=14 + - python >=3.10,<3.11.0a0 + - python_abi 3.10.* *_cp310 + - typing_extensions >=4.0.0 + constrains: + - __glibc >=2.17 + license: MIT + license_family: MIT + purls: + - pkg:pypi/obstore?source=hash-mapping + size: 3233255 + timestamp: 1773327830123 +- conda: https://conda.anaconda.org/conda-forge/linux-64/openjpeg-2.5.3-h55fea9a_1.conda + sha256: 0b7396dacf988f0b859798711b26b6bc9c6161dca21bacfd778473da58730afa + md5: 01243c4aaf71bde0297966125aea4706 + depends: + - __glibc 
>=2.17,<3.0.a0 + - libgcc >=14 + - libpng >=1.6.50,<1.7.0a0 + - libstdcxx >=14 + - libtiff >=4.7.0,<4.8.0a0 + - libzlib >=1.3.1,<2.0a0 + license: BSD-2-Clause + license_family: BSD + purls: [] + size: 357828 + timestamp: 1754297886899 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/openjpeg-2.5.4-hd9e9057_0.conda + sha256: 60aca8b9f94d06b852b296c276b3cf0efba5a6eb9f25feb8708570d3a74f00e4 + md5: 4b5d3a91320976eec71678fad1e3569b + depends: + - __osx >=11.0 + - libcxx >=19 + - libpng >=1.6.55,<1.7.0a0 + - libtiff >=4.7.1,<4.8.0a0 + - libzlib >=1.3.1,<2.0a0 + license: BSD-2-Clause + license_family: BSD + purls: [] + size: 319697 + timestamp: 1772625397692 +- conda: https://conda.anaconda.org/conda-forge/linux-64/openldap-2.6.10-he970967_0.conda + sha256: cb0b07db15e303e6f0a19646807715d28f1264c6350309a559702f4f34f37892 + md5: 2e5bf4f1da39c0b32778561c3c4e5878 + depends: + - __glibc >=2.17,<3.0.a0 + - cyrus-sasl >=2.1.27,<3.0a0 + - krb5 >=1.21.3,<1.22.0a0 + - libgcc >=13 + - libstdcxx >=13 + - openssl >=3.5.0,<4.0a0 + license: OLDAP-2.8 + license_family: BSD + purls: [] + size: 780253 + timestamp: 1748010165522 +- conda: https://conda.anaconda.org/conda-forge/linux-64/openpyxl-3.1.5-py310h05b0c27_3.conda + sha256: 6906538d301d8aa586eecaf7b4c0c0640d15544b5e8540b4f2852f0bdb992c90 + md5: 735c6d22328fc75dbe5ff9eaf8aa028b + depends: + - et_xmlfile + - libgcc >=14 + - python >=3.10,<3.11.0a0 + - python_abi 3.10.* *_cp310 + license: MIT + license_family: MIT + purls: + - pkg:pypi/openpyxl?source=hash-mapping + size: 386806 + timestamp: 1769122094748 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/openpyxl-3.1.5-py310hb1d31aa_3.conda + sha256: 6a893f6c82709621306d6f56f7d6d947fa8d567c95be9f0f7a40352017226b0a + md5: 1114c8b368ce828afae8e56f1122fa74 + depends: + - et_xmlfile + - python >=3.10,<3.11.0a0 + - python >=3.10,<3.11.0a0 *_cpython + - python_abi 3.10.* *_cp310 + license: MIT + license_family: MIT + purls: + - pkg:pypi/openpyxl?source=hash-mapping + 
size: 389108 + timestamp: 1769122502601 +- conda: https://conda.anaconda.org/conda-forge/linux-64/openssl-3.6.2-h35e630c_0.conda + sha256: c0ef482280e38c71a08ad6d71448194b719630345b0c9c60744a2010e8a8e0cb + md5: da1b85b6a87e141f5140bb9924cecab0 + depends: + - __glibc >=2.17,<3.0.a0 + - ca-certificates + - libgcc >=14 + license: Apache-2.0 + license_family: Apache + purls: [] + size: 3167099 + timestamp: 1775587756857 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/openssl-3.6.2-hd24854e_0.conda + sha256: c91bf510c130a1ea1b6ff023e28bac0ccaef869446acd805e2016f69ebdc49ea + md5: 25dcccd4f80f1638428613e0d7c9b4e1 + depends: + - __osx >=11.0 + - ca-certificates + license: Apache-2.0 + license_family: Apache + purls: [] + size: 3106008 + timestamp: 1775587972483 +- conda: https://conda.anaconda.org/conda-forge/linux-64/orc-2.2.1-hd747db4_0.conda + sha256: 8d91d6398fc63a94d238e64e4983d38f6f9555460f11bed00abb2da04dbadf7c + md5: ddab8b2af55b88d63469c040377bd37e + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=14 + - libprotobuf >=6.31.1,<6.31.2.0a0 + - libstdcxx >=14 + - libzlib >=1.3.1,<2.0a0 + - lz4-c >=1.10.0,<1.11.0a0 + - snappy >=1.2.2,<1.3.0a0 + - tzdata + - zstd >=1.5.7,<1.6.0a0 + license: Apache-2.0 + license_family: Apache + purls: [] + size: 1316445 + timestamp: 1759424644934 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/orc-2.2.1-h4fd0076_0.conda + sha256: f0a31625a647cb8d55a7016950c11f8fabc394df5054d630e9c9b526bf573210 + md5: b5dea50c77ab3cc18df48bdc9994ac44 + depends: + - __osx >=11.0 + - libcxx >=19 + - libprotobuf >=6.31.1,<6.31.2.0a0 + - libzlib >=1.3.1,<2.0a0 + - lz4-c >=1.10.0,<1.11.0a0 + - snappy >=1.2.2,<1.3.0a0 + - tzdata + - zstd >=1.5.7,<1.6.0a0 + license: Apache-2.0 + license_family: Apache + purls: [] + size: 487298 + timestamp: 1759424875005 +- conda: https://conda.anaconda.org/conda-forge/linux-64/orjson-3.11.8-py310hfe99b16_0.conda + sha256: 09b983ad381c5c3b9215693df6559f5e7979cfccb990005b8a108cd72d525195 + md5: 
f48af5a78a30efe5e070711629a36c4d + depends: + - python + - libgcc >=14 + - __glibc >=2.17,<3.0.a0 + - python_abi 3.10.* *_cp310 + constrains: + - __glibc >=2.17 + license: Apache-2.0 + license_family: APACHE + purls: + - pkg:pypi/orjson?source=hash-mapping + size: 364687 + timestamp: 1774994025065 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/orjson-3.11.8-py310h38230ac_0.conda + sha256: 8255f129d7ab63169ed82b71a173c6e55fea0ccf659d39a52c31222a4e97c4d3 + md5: 14fb7e3506521dff5477f2d2143d9ecd + depends: + - python + - python 3.10.* *_cpython + - __osx >=11.0 + - python_abi 3.10.* *_cp310 + constrains: + - __osx >=11.0 + license: Apache-2.0 + license_family: APACHE + purls: + - pkg:pypi/orjson?source=hash-mapping + size: 335668 + timestamp: 1774994135683 +- conda: https://conda.anaconda.org/conda-forge/noarch/packaging-26.0-pyhcf101f3_0.conda + sha256: c1fc0f953048f743385d31c468b4a678b3ad20caffdeaa94bed85ba63049fd58 + md5: b76541e68fea4d511b1ac46a28dcd2c6 + depends: + - python >=3.8 + - python + license: Apache-2.0 + license_family: APACHE + purls: + - pkg:pypi/packaging?source=compressed-mapping + size: 72010 + timestamp: 1769093650580 +- conda: https://conda.anaconda.org/conda-forge/linux-64/pandas-2.3.3-py310h0158d43_2.conda + sha256: b9e88fa02fd5e99f54c168df622eda9ddf898cc15e631179963aca51d97244bf + md5: 0610ed073acc4737d036125a5a6dbae2 + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=14 + - libstdcxx >=14 + - numpy >=1.21,<3 + - numpy >=1.22.4 + - python >=3.10,<3.11.0a0 + - python-dateutil >=2.8.2 + - python-tzdata >=2022.7 + - python_abi 3.10.* *_cp310 + - pytz >=2020.1 + constrains: + - odfpy >=1.4.1 + - pyarrow >=10.0.1 + - pyqt5 >=5.15.9 + - numexpr >=2.8.4 + - fsspec >=2022.11.0 + - bottleneck >=1.3.6 + - beautifulsoup4 >=4.11.2 + - pandas-gbq >=0.19.0 + - s3fs >=2022.11.0 + - gcsfs >=2022.11.0 + - sqlalchemy >=2.0.0 + - pytables >=3.8.0 + - html5lib >=1.1 + - python-calamine >=0.1.7 + - lxml >=4.9.2 + - qtpy >=2.3.0 + - scipy >=1.10.0 + - 
numba >=0.56.4 + - openpyxl >=3.1.0 + - blosc >=1.21.3 + - pyreadstat >=1.2.0 + - zstandard >=0.19.0 + - xarray >=2022.12.0 + - matplotlib >=3.6.3 + - tabulate >=0.9.0 + - fastparquet >=2022.12.0 + - psycopg2 >=2.9.6 + - xlsxwriter >=3.0.5 + - xlrd >=2.0.1 + - tzdata >=2022.7 + - pyxlsb >=1.0.10 + license: BSD-3-Clause + license_family: BSD + purls: + - pkg:pypi/pandas?source=hash-mapping + size: 12391209 + timestamp: 1764615007370 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/pandas-2.3.3-py310h25f4b65_1.conda + sha256: 231f393393c76a483aec78d2c24c48cb97c3dd8328382ba529e8c4c0e7c81922 + md5: 6aa7000c6851bdfbb9a3fe7319f5b5e2 + depends: + - __osx >=11.0 + - libcxx >=19 + - numpy >=1.21,<3 + - numpy >=1.22.4 + - python >=3.10,<3.11.0a0 + - python >=3.10,<3.11.0a0 *_cpython + - python-dateutil >=2.8.2 + - python-tzdata >=2022.7 + - python_abi 3.10.* *_cp310 + - pytz >=2020.1 + constrains: + - pytables >=3.8.0 + - openpyxl >=3.1.0 + - bottleneck >=1.3.6 + - zstandard >=0.19.0 + - pyxlsb >=1.0.10 + - matplotlib >=3.6.3 + - sqlalchemy >=2.0.0 + - beautifulsoup4 >=4.11.2 + - odfpy >=1.4.1 + - python-calamine >=0.1.7 + - html5lib >=1.1 + - fsspec >=2022.11.0 + - blosc >=1.21.3 + - pyqt5 >=5.15.9 + - qtpy >=2.3.0 + - numexpr >=2.8.4 + - pyreadstat >=1.2.0 + - scipy >=1.10.0 + - pandas-gbq >=0.19.0 + - lxml >=4.9.2 + - pyarrow >=10.0.1 + - s3fs >=2022.11.0 + - xlrd >=2.0.1 + - numba >=0.56.4 + - xlsxwriter >=3.0.5 + - tabulate >=0.9.0 + - xarray >=2022.12.0 + - psycopg2 >=2.9.6 + - fastparquet >=2022.12.0 + - tzdata >=2022.7 + - gcsfs >=2022.11.0 + license: BSD-3-Clause + license_family: BSD + purls: + - pkg:pypi/pandas?source=hash-mapping + size: 11619840 + timestamp: 1759266892769 +- conda: https://conda.anaconda.org/conda-forge/noarch/parso-0.8.6-pyhcf101f3_0.conda + sha256: 42b2d77ccea60752f3aa929a6413a7835aaacdbbde679f2f5870a744fa836b94 + md5: 97c1ce2fffa1209e7afb432810ec6e12 + depends: + - python >=3.10 + - python + license: MIT + license_family: MIT + 
purls: + - pkg:pypi/parso?source=compressed-mapping + size: 82287 + timestamp: 1770676243987 +- conda: https://conda.anaconda.org/conda-forge/noarch/partd-1.4.2-pyhd8ed1ab_0.conda + sha256: 472fc587c63ec4f6eba0cc0b06008a6371e0a08a5986de3cf4e8024a47b4fe6c + md5: 0badf9c54e24cecfb0ad2f99d680c163 + depends: + - locket + - python >=3.9 + - toolz + license: BSD-3-Clause + license_family: BSD + purls: + - pkg:pypi/partd?source=hash-mapping + size: 20884 + timestamp: 1715026639309 +- conda: https://conda.anaconda.org/conda-forge/noarch/pathspec-1.0.4-pyhd8ed1ab_0.conda + sha256: 29ea20d0faf20374fcd61c25f6d32fb8e9a2c786a7f1473a0c3ead359470fbe1 + md5: 2908273ac396d2cd210a8127f5f1c0d6 + depends: + - python >=3.10 + license: MPL-2.0 + license_family: MOZILLA + purls: + - pkg:pypi/pathspec?source=hash-mapping + size: 53739 + timestamp: 1769677743677 +- conda: https://conda.anaconda.org/conda-forge/linux-64/pcre2-10.46-h1321c63_0.conda + sha256: 5c7380c8fd3ad5fc0f8039069a45586aa452cf165264bc5a437ad80397b32934 + md5: 7fa07cb0fb1b625a089ccc01218ee5b1 + depends: + - __glibc >=2.17,<3.0.a0 + - bzip2 >=1.0.8,<2.0a0 + - libgcc >=14 + - libzlib >=1.3.1,<2.0a0 + license: BSD-3-Clause + license_family: BSD + purls: [] + size: 1209177 + timestamp: 1756742976157 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/pcre2-10.47-h30297fc_0.conda + sha256: 5e2e443f796f2fd92adf7978286a525fb768c34e12b1ee9ded4000a41b2894ba + md5: 9b4190c4055435ca3502070186eba53a + depends: + - __osx >=11.0 + - bzip2 >=1.0.8,<2.0a0 + - libzlib >=1.3.1,<2.0a0 + license: BSD-3-Clause + license_family: BSD + purls: [] + size: 850231 + timestamp: 1763655726735 +- conda: https://conda.anaconda.org/conda-forge/noarch/pexpect-4.9.0-pyhd8ed1ab_1.conda + sha256: 202af1de83b585d36445dc1fda94266697341994d1a3328fabde4989e1b3d07a + md5: d0d408b1f18883a944376da5cf8101ea + depends: + - ptyprocess >=0.5 + - python >=3.9 + license: ISC + purls: + - pkg:pypi/pexpect?source=hash-mapping + size: 53561 + timestamp: 
1733302019362 +- conda: https://conda.anaconda.org/conda-forge/noarch/pickleshare-0.7.5-pyhd8ed1ab_1004.conda + sha256: e2ac3d66c367dada209fc6da43e645672364b9fd5f9d28b9f016e24b81af475b + md5: 11a9d1d09a3615fc07c3faf79bc0b943 + depends: + - python >=3.9 + license: MIT + license_family: MIT + purls: + - pkg:pypi/pickleshare?source=hash-mapping + size: 11748 + timestamp: 1733327448200 +- conda: https://conda.anaconda.org/conda-forge/linux-64/pillow-11.3.0-py310h6557065_3.conda + sha256: 7fe27fd1c5a3d85ea355a609d050e50469382223bbf5a07ca750e30b6aebdc25 + md5: e169733dc0c743687a852f1c6e989140 + depends: + - python + - libgcc >=14 + - __glibc >=2.17,<3.0.a0 + - libtiff >=4.7.0,<4.8.0a0 + - libwebp-base >=1.6.0,<2.0a0 + - libxcb >=1.17.0,<2.0a0 + - python_abi 3.10.* *_cp310 + - libfreetype >=2.14.1 + - libfreetype6 >=2.14.1 + - lcms2 >=2.17,<3.0a0 + - tk >=8.6.13,<8.7.0a0 + - libjpeg-turbo >=3.1.0,<4.0a0 + - openjpeg >=2.5.3,<3.0a0 + - libzlib >=1.3.1,<2.0a0 + license: HPND + purls: + - pkg:pypi/pillow?source=hash-mapping + size: 882171 + timestamp: 1758208668856 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/pillow-12.0.0-py310hcac772a_1.conda + sha256: 9641a35e0a3c0d6e66fa00c730adbf205538aed65dedb668045744422f49952d + md5: 4a42863ff61296f8be72a74878b5fda0 + depends: + - python + - __osx >=11.0 + - python 3.10.* *_cpython + - libwebp-base >=1.6.0,<2.0a0 + - tk >=8.6.13,<8.7.0a0 + - openjpeg >=2.5.4,<3.0a0 + - zlib-ng >=2.2.5,<2.3.0a0 + - lcms2 >=2.17,<3.0a0 + - libfreetype >=2.14.1 + - libfreetype6 >=2.14.1 + - libtiff >=4.7.1,<4.8.0a0 + - libjpeg-turbo >=3.1.2,<4.0a0 + - python_abi 3.10.* *_cp310 + - libxcb >=1.17.0,<2.0a0 + license: HPND + purls: + - pkg:pypi/pillow?source=hash-mapping + size: 805080 + timestamp: 1764033310541 +- conda: https://conda.anaconda.org/conda-forge/noarch/pims-0.7-pyhd8ed1ab_1.conda + sha256: cc9521b3a517c9c0f5097a96ed2285b89ba3ee291320a26100261fea2130f8bf + md5: 146adfd93cac5e7c6b5def8f39c917cd + depends: + - imageio + - jinja2 + 
- numpy >=1.19 + - packaging + - pillow + - python >=3.9 + - slicerator >=1.1.0 + - tifffile + license: BSD-3-Clause + license_family: BSD + purls: + - pkg:pypi/pims?source=hash-mapping + size: 71357 + timestamp: 1734051228623 +- conda: https://conda.anaconda.org/conda-forge/noarch/pip-26.0.1-pyh8b19718_0.conda + sha256: 8e1497814a9997654ed7990a79c054ea5a42545679407acbc6f7e809c73c9120 + md5: 67bdec43082fd8a9cffb9484420b39a2 + depends: + - python >=3.10,<3.13.0a0 + - setuptools + - wheel + license: MIT + license_family: MIT + purls: + - pkg:pypi/pip?source=compressed-mapping + size: 1181790 + timestamp: 1770270305795 +- conda: https://conda.anaconda.org/conda-forge/linux-64/pixman-0.46.4-h54a6638_1.conda + sha256: 43d37bc9ca3b257c5dd7bf76a8426addbdec381f6786ff441dc90b1a49143b6a + md5: c01af13bdc553d1a8fbfff6e8db075f0 + depends: + - libgcc >=14 + - libstdcxx >=14 + - libgcc >=14 + - __glibc >=2.17,<3.0.a0 + license: MIT + license_family: MIT + purls: [] + size: 450960 + timestamp: 1754665235234 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/pixman-0.46.4-h81086ad_1.conda + sha256: 29c9b08a9b8b7810f9d4f159aecfd205fce051633169040005c0b7efad4bc718 + md5: 17c3d745db6ea72ae2fce17e7338547f + depends: + - __osx >=11.0 + - libcxx >=19 + license: MIT + license_family: MIT + purls: [] + size: 248045 + timestamp: 1754665282033 +- conda: https://conda.anaconda.org/conda-forge/noarch/platformdirs-4.9.6-pyhcf101f3_0.conda + sha256: 8f29915c172f1f7f4f7c9391cd5dac3ebf5d13745c8b7c8006032615246345a5 + md5: 89c0b6d1793601a2a3a3f7d2d3d8b937 + depends: + - python >=3.10 + - python + license: MIT + license_family: MIT + purls: + - pkg:pypi/platformdirs?source=compressed-mapping + size: 25862 + timestamp: 1775741140609 +- conda: https://conda.anaconda.org/conda-forge/noarch/pluggy-1.6.0-pyhf9edf01_1.conda + sha256: e14aafa63efa0528ca99ba568eaf506eb55a0371d12e6250aaaa61718d2eb62e + md5: d7585b6550ad04c8c5e21097ada2888e + depends: + - python >=3.9 + - python + license: MIT + 
license_family: MIT + purls: + - pkg:pypi/pluggy?source=compressed-mapping + size: 25877 + timestamp: 1764896838868 +- conda: https://conda.anaconda.org/conda-forge/noarch/pre-commit-4.5.1-pyha770c72_0.conda + sha256: 5b81b7516d4baf43d0c185896b245fa7384b25dc5615e7baa504b7fa4e07b706 + md5: 7f3ac694319c7eaf81a0325d6405e974 + depends: + - cfgv >=2.0.0 + - identify >=1.0.0 + - nodeenv >=0.11.1 + - python >=3.10 + - pyyaml >=5.1 + - virtualenv >=20.10.0 + license: MIT + license_family: MIT + purls: + - pkg:pypi/pre-commit?source=hash-mapping + size: 200827 + timestamp: 1765937577534 +- conda: https://conda.anaconda.org/conda-forge/noarch/pre-commit-hooks-5.0.0-pyhd8ed1ab_2.conda + sha256: b3c0e650280e660268c5c3a609c1d008fab598c41eb310f5c6993590889625e7 + md5: f41a1e00c55bc911fcc9cab2a88b4a66 + depends: + - python >=3.9 + - ruamel.yaml >=0.15 + - tomli >=1.1.0 + license: MIT + license_family: MIT + purls: + - pkg:pypi/pre-commit-hooks?source=hash-mapping + size: 34986 + timestamp: 1734603755600 +- conda: https://conda.anaconda.org/conda-forge/noarch/prettytable-3.17.0-pyhd8ed1ab_0.conda + sha256: a1cc667bd683f26c319ccab257cd3e17b33f34ef90ff4f548a811a342358c952 + md5: 9a12c482f559d39f3ed9550ba9e0eeb0 + depends: + - python >=3.10 + - wcwidth + constrains: + - ptable >=9999 + license: BSD-3-Clause + license_family: BSD + purls: + - pkg:pypi/prettytable?source=hash-mapping + size: 35808 + timestamp: 1763199361018 +- conda: https://conda.anaconda.org/conda-forge/linux-64/prometheus-cpp-1.3.0-ha5d0236_0.conda + sha256: 013669433eb447548f21c3c6b16b2ed64356f726b5f77c1b39d5ba17a8a4b8bc + md5: a83f6a2fdc079e643237887a37460668 + depends: + - __glibc >=2.17,<3.0.a0 + - libcurl >=8.10.1,<9.0a0 + - libgcc >=13 + - libstdcxx >=13 + - libzlib >=1.3.1,<2.0a0 + - zlib + license: MIT + license_family: MIT + purls: [] + size: 199544 + timestamp: 1730769112346 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/prometheus-cpp-1.3.0-h0967b3e_0.conda + sha256: 
851a77ae1a8e90db9b9f3c4466abea7afb52713c3d98ceb0d37ba6ff27df2eff + md5: 7172339b49c94275ba42fec3eaeda34f + depends: + - __osx >=11.0 + - libcurl >=8.10.1,<9.0a0 + - libcxx >=18 + - libzlib >=1.3.1,<2.0a0 + - zlib + license: MIT + license_family: MIT + purls: [] + size: 173220 + timestamp: 1730769371051 +- conda: https://conda.anaconda.org/conda-forge/noarch/prometheus_client-0.25.0-pyhd8ed1ab_0.conda + sha256: 4d7ec90d4f9c1f3b4a50623fefe4ebba69f651b102b373f7c0e9dbbfa43d495c + md5: a11ab1f31af799dd93c3a39881528884 + depends: + - python >=3.10 + license: Apache-2.0 + license_family: Apache + purls: + - pkg:pypi/prometheus-client?source=compressed-mapping + size: 57113 + timestamp: 1775771465170 +- conda: https://conda.anaconda.org/conda-forge/noarch/prompt-toolkit-3.0.52-pyha770c72_0.conda + sha256: 4817651a276016f3838957bfdf963386438c70761e9faec7749d411635979bae + md5: edb16f14d920fb3faf17f5ce582942d6 + depends: + - python >=3.10 + - wcwidth + constrains: + - prompt_toolkit 3.0.52 + license: BSD-3-Clause + license_family: BSD + purls: + - pkg:pypi/prompt-toolkit?source=hash-mapping + size: 273927 + timestamp: 1756321848365 +- conda: https://conda.anaconda.org/conda-forge/linux-64/psutil-7.2.2-py310h139afa4_0.conda + sha256: 3a6d46033ebad3e69ded3f76852b9c378c2cff632f57421b5926c6add1bae475 + md5: d210342acdb8e3ca6434295497c10b7c + depends: + - python + - libgcc >=14 + - __glibc >=2.17,<3.0.a0 + - python_abi 3.10.* *_cp310 + license: BSD-3-Clause + license_family: BSD + purls: + - pkg:pypi/psutil?source=hash-mapping + size: 179015 + timestamp: 1769678154886 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/psutil-7.2.2-py310haea493c_0.conda + sha256: f2336814f97e23cec0c6b082afecc2e9ec5941fce19f08d453e8ea53221c82ef + md5: 8c73aa05dd807bb9b1cdc0e028ab4f13 + depends: + - python + - python 3.10.* *_cpython + - __osx >=11.0 + - python_abi 3.10.* *_cp310 + license: BSD-3-Clause + license_family: BSD + purls: + - pkg:pypi/psutil?source=hash-mapping + size: 192483 + 
timestamp: 1769678341407 +- conda: https://conda.anaconda.org/conda-forge/linux-64/pthread-stubs-0.4-hb9d3cd8_1002.conda + sha256: 9c88f8c64590e9567c6c80823f0328e58d3b1efb0e1c539c0315ceca764e0973 + md5: b3c17d95b5a10c6e64a21fa17573e70e + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=13 + license: MIT + license_family: MIT + purls: [] + size: 8252 + timestamp: 1726802366959 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/pthread-stubs-0.4-hd74edd7_1002.conda + sha256: 8ed65e17fbb0ca944bfb8093b60086e3f9dd678c3448b5de212017394c247ee3 + md5: 415816daf82e0b23a736a069a75e9da7 + depends: + - __osx >=11.0 + license: MIT + license_family: MIT + purls: [] + size: 8381 + timestamp: 1726802424786 +- conda: https://conda.anaconda.org/conda-forge/noarch/ptyprocess-0.7.0-pyhd8ed1ab_1.conda + sha256: a7713dfe30faf17508ec359e0bc7e0983f5d94682492469bd462cdaae9c64d83 + md5: 7d9daffbb8d8e0af0f769dbbcd173a54 + depends: + - python >=3.9 + license: ISC + purls: + - pkg:pypi/ptyprocess?source=hash-mapping + size: 19457 + timestamp: 1733302371990 +- conda: https://conda.anaconda.org/conda-forge/noarch/pure_eval-0.2.3-pyhd8ed1ab_1.conda + sha256: 71bd24600d14bb171a6321d523486f6a06f855e75e547fa0cb2a0953b02047f0 + md5: 3bfdfb8dbcdc4af1ae3f9a8eb3948f04 + depends: + - python >=3.9 + license: MIT + license_family: MIT + purls: + - pkg:pypi/pure-eval?source=hash-mapping + size: 16668 + timestamp: 1733569518868 +- conda: https://conda.anaconda.org/conda-forge/noarch/py-cpuinfo-9.0.0-pyhd8ed1ab_1.conda + sha256: 6d8f03c13d085a569fde931892cded813474acbef2e03381a1a87f420c7da035 + md5: 46830ee16925d5ed250850503b5dc3a8 + depends: + - python >=3.9 + license: MIT + license_family: MIT + purls: + - pkg:pypi/py-cpuinfo?source=hash-mapping + size: 25766 + timestamp: 1733236452235 +- conda: https://conda.anaconda.org/conda-forge/linux-64/pyarrow-22.0.0-py310hff52083_2.conda + sha256: 29bcc36c43384f83653a4ce07baf93dd01afffa0d1253e9fc06ab7f5e754e7d1 + md5: 4687aeb3ae4f26f21ce6e28e2230637c + 
depends: + - libarrow-acero 22.0.0.* + - libarrow-dataset 22.0.0.* + - libarrow-substrait 22.0.0.* + - libparquet 22.0.0.* + - pyarrow-core 22.0.0 *_2_* + - python >=3.10,<3.11.0a0 + - python_abi 3.10.* *_cp310 + license: Apache-2.0 + license_family: APACHE + purls: [] + size: 33014 + timestamp: 1770652422818 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/pyarrow-22.0.0-py310hb6292c7_2.conda + sha256: 55032270cb43fb81afd84f90d2fad679b26e390ab3f1039e47146a6b898b66bf + md5: 4f460ac751d73a33dc737a2299b4bcaf + depends: + - libarrow-acero 22.0.0.* + - libarrow-dataset 22.0.0.* + - libarrow-substrait 22.0.0.* + - libparquet 22.0.0.* + - pyarrow-core 22.0.0 *_2_* + - python >=3.10,<3.11.0a0 + - python_abi 3.10.* *_cp310 + license: Apache-2.0 + license_family: APACHE + purls: [] + size: 33128 + timestamp: 1770652715553 +- conda: https://conda.anaconda.org/conda-forge/linux-64/pyarrow-core-22.0.0-py310h923f568_2_cpu.conda + build_number: 2 + sha256: 7e98c4e82cfde80af62b4975fa2a5663e09240e3b49da785850c201dc7f9d02b + md5: 526b7d4e819e2e9f414f85f43859c2d3 + depends: + - __glibc >=2.17,<3.0.a0 + - libarrow 22.0.0.* *cpu + - libarrow-compute 22.0.0.* *cpu + - libgcc >=14 + - libstdcxx >=14 + - libzlib >=1.3.1,<2.0a0 + - python >=3.10,<3.11.0a0 + - python_abi 3.10.* *_cp310 + constrains: + - numpy >=1.23,<3 + - apache-arrow-proc * cpu + license: Apache-2.0 + license_family: APACHE + purls: + - pkg:pypi/pyarrow?source=hash-mapping + size: 5211198 + timestamp: 1770652396982 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/pyarrow-core-22.0.0-py310h92b138f_2_cpu.conda + build_number: 2 + sha256: 50bfc981daad53fea77a2b3e76549b77e67ebf5e0c7cf48decc8f25a9fbdc9a2 + md5: 6ff2f5482d7f26ad9d9b92075ff91b3e + depends: + - __osx >=11.0 + - libarrow 22.0.0.* *cpu + - libarrow-compute 22.0.0.* *cpu + - libcxx >=18 + - libzlib >=1.3.1,<2.0a0 + - python >=3.10,<3.11.0a0 + - python >=3.10,<3.11.0a0 *_cpython + - python_abi 3.10.* *_cp310 + constrains: + - apache-arrow-proc * 
cpu + - numpy >=1.23,<3 + license: Apache-2.0 + license_family: APACHE + purls: + - pkg:pypi/pyarrow?source=hash-mapping + size: 4595430 + timestamp: 1770652648537 +- conda: https://conda.anaconda.org/conda-forge/noarch/pyasn1-0.6.3-pyhcf101f3_0.conda + sha256: 6fd53b7a2793404aef62313ff2fcfef0c661d6b71de90ef3d38c0908249eea76 + md5: f5a488544d2eb37f46b3bebf1f378337 + depends: + - python >=3.10 + - python + license: BSD-2-Clause + license_family: BSD + purls: + - pkg:pypi/pyasn1?source=hash-mapping + size: 66593 + timestamp: 1773729387446 +- conda: https://conda.anaconda.org/conda-forge/linux-64/pycairo-1.29.0-py310h8c3e0f7_1.conda + sha256: cb38233650ca89b11f0f2bd63eb87771821698cc23ff14fffdf9b3f0a6af09c2 + md5: 3c745bc64fa081b67605263e47b80a16 + depends: + - __glibc >=2.17,<3.0.a0 + - cairo >=1.18.4,<2.0a0 + - libexpat >=2.7.3,<3.0a0 + - libgcc >=14 + - libzlib >=1.3.1,<2.0a0 + - python >=3.10,<3.11.0a0 + - python_abi 3.10.* *_cp310 + license: LGPL-2.1-only OR MPL-1.1 + purls: + - pkg:pypi/pycairo?source=hash-mapping + size: 119200 + timestamp: 1770726339582 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/pycairo-1.29.0-py310h6464c50_1.conda + sha256: 54952fa09f80e334484e211ef1ec9f9eae9204c692bc4690c334bd9a7c84b122 + md5: ea684b741ef85b963c67cd947cebfa23 + depends: + - __osx >=11.0 + - cairo >=1.18.4,<2.0a0 + - libexpat >=2.7.3,<3.0a0 + - libzlib >=1.3.1,<2.0a0 + - python >=3.10,<3.11.0a0 + - python >=3.10,<3.11.0a0 *_cpython + - python_abi 3.10.* *_cp310 + license: LGPL-2.1-only OR MPL-1.1 + purls: + - pkg:pypi/pycairo?source=hash-mapping + size: 105493 + timestamp: 1770726627937 +- conda: https://conda.anaconda.org/conda-forge/noarch/pycodestyle-2.14.0-pyhd8ed1ab_0.conda + sha256: 1950f71ff44e64163e176b1ca34812afc1a104075c3190de50597e1623eb7d53 + md5: 85815c6a22905c080111ec8d56741454 + depends: + - python >=3.9 + license: MIT + license_family: MIT + purls: + - pkg:pypi/pycodestyle?source=hash-mapping + size: 35182 + timestamp: 1750616054854 +- conda: 
https://conda.anaconda.org/conda-forge/noarch/pycparser-2.22-pyh29332c3_1.conda + sha256: 79db7928d13fab2d892592223d7570f5061c192f27b9febd1a418427b719acc6 + md5: 12c566707c80111f9799308d9e265aef + depends: + - python >=3.9 + - python + license: BSD-3-Clause + license_family: BSD + purls: + - pkg:pypi/pycparser?source=hash-mapping + size: 110100 + timestamp: 1733195786147 +- conda: https://conda.anaconda.org/conda-forge/linux-64/pycryptodome-3.23.0-py310he45356f_2.conda + sha256: c23fdc9afbdac8113418b6b76e3b3108e5d82affc6225138916c9c41b5d8c0a8 + md5: b4b7fc99fac71b590f340edfd86958e2 + depends: + - __glibc >=2.17,<3.0.a0 + - gmp >=6.3.0,<7.0a0 + - libgcc >=14 + - python >=3.10,<3.11.0a0 + - python_abi 3.10.* *_cp310 + license: BSD-2-Clause + license_family: BSD + purls: + - pkg:pypi/pycryptodome?source=hash-mapping + size: 1521308 + timestamp: 1768755539095 +- conda: https://conda.anaconda.org/conda-forge/noarch/pydantic-2.13.0-pyhcf101f3_0.conda + sha256: 237330a57a9d4d742cdf22259daafada9f287b68da9ffccdf138af4647d0910f + md5: c176d6075acee8d6847988b7865bd1af + depends: + - typing-inspection >=0.4.2 + - typing_extensions >=4.14.1 + - python >=3.10 + - annotated-types >=0.6.0 + - pydantic-core ==2.46.0 + - python + license: MIT + purls: + - pkg:pypi/pydantic?source=compressed-mapping + size: 346673 + timestamp: 1776083858303 +- conda: https://conda.anaconda.org/conda-forge/linux-64/pydantic-core-2.46.0-py310hd8f68c5_0.conda + sha256: b3315f09613dbbee90dab33f898aec9de16cc39bcb0a9d66f2f92660d7e9603d + md5: f0990b2ae1eb4c0feea2b84a99c4904f + depends: + - python + - typing-extensions >=4.6.0,!=4.7.0 + - __glibc >=2.17,<3.0.a0 + - libgcc >=14 + - python_abi 3.10.* *_cp310 + constrains: + - __glibc >=2.17 + license: MIT + purls: + - pkg:pypi/pydantic-core?source=hash-mapping + size: 1894185 + timestamp: 1776075323623 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/pydantic-core-2.46.0-py310h9365ca8_0.conda + sha256: 
1130df6d9588affe13f035cacc9fdf2fcb17c26b7f365de05fe037a565b0fcca + md5: 6ae155cd22213ad93b34c3714270c939 + depends: + - python + - typing-extensions >=4.6.0,!=4.7.0 + - __osx >=11.0 + - python 3.10.* *_cpython + - python_abi 3.10.* *_cp310 + constrains: + - __osx >=11.0 + license: MIT + purls: + - pkg:pypi/pydantic-core?source=hash-mapping + size: 1733458 + timestamp: 1776075443400 +- conda: https://conda.anaconda.org/conda-forge/noarch/pydantic-extra-types-2.11.2-pyhcf101f3_0.conda + sha256: 0e86d31f300abe09d958d8a02c164e742b4cfe7403955a24ca02b498d50251c7 + md5: f9acfec2bcfcf6f43594674389da835e + depends: + - python >=3.10 + - pydantic >=2.5.2 + - python + constrains: + - phonenumbers >=8,<9 + - pycountry >=23 + - semver >=3.0.2,<4 + - python-ulid >=1,<4 + - pendulum >=3.0.0,<4.0.0 + - pytz >=2024.1 + - tzdata >=2024a + license: MIT + license_family: MIT + purls: + - pkg:pypi/pydantic-extra-types?source=hash-mapping + size: 73947 + timestamp: 1775429718410 +- conda: https://conda.anaconda.org/conda-forge/noarch/pydantic-settings-2.13.1-pyhd8ed1ab_0.conda + sha256: 343988d65c08477a87268d4fbeba59d0295514143965d2755ac4519b73155479 + md5: cc0da73801948100ae97383b8da12993 + depends: + - pydantic >=2.7.0 + - python >=3.10 + - python-dotenv >=0.21.0 + - typing-inspection >=0.4.0 + license: MIT + license_family: MIT + purls: + - pkg:pypi/pydantic-settings?source=hash-mapping + size: 49319 + timestamp: 1771527313149 +- pypi: https://files.pythonhosted.org/packages/82/83/7dafb09fbc3efe9d00c4667d22b32b53d08e8a676fa164c6dd8f5debe85e/pyepics-3.5.9-py3-none-any.whl + name: pyepics + version: 3.5.9 + sha256: b9863cc55a58542f0a28ad04621d4471f649e9cacfa4ccf346a58d6ba158640c + requires_dist: + - numpy>=1.26 + - pyparsing + - sphinx ; extra == 'doc' + - numpydoc ; extra == 'doc' + - coverage ; extra == 'test' + - pytest ; extra == 'test' + - pytest-cov ; extra == 'test' + - psutil ; sys_platform == 'linux' and extra == 'test' + - pyepics[doc,test] ; extra == 'all' + requires_python: 
'>=3.10' +- conda: https://conda.anaconda.org/conda-forge/linux-64/pyerfa-2.0.1.5-py310h32771cd_2.conda + noarch: python + sha256: a3f25f921be09e15ed6ff46a1ec99ce9cca6affa4a086f6f39ad630e21e48fb7 + md5: e6efd9593a25d093b4ce9dd8053c4af7 + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=14 + - numpy >=1.21,<3 + - python + license: BSD-3-Clause + license_family: BSD + purls: + - pkg:pypi/pyerfa?source=hash-mapping + size: 295617 + timestamp: 1756821497270 +- pypi: https://files.pythonhosted.org/packages/56/22/9e726b9537ec8ebe9981dc3dce07d4e72b059ad0546bc19ca16771ee23a5/pyfai-2026.3.0-cp310-cp310-macosx_11_0_arm64.whl + name: pyfai + version: 2026.3.0 + sha256: 95edd242ce7a40a83dd1c2d4907f2c8874b518df1baefa563ce383b7f3af5138 + requires_dist: + - numpy>=1.10 + - h5py + - fabio + - silx>=2 + - numexpr!=2.8.6 + - scipy + - matplotlib + - pyside6 ; extra == 'gui' + - pyopencl ; extra == 'opencl' + - pyside6 ; extra == 'all' + - pyopencl ; extra == 'all' + - hdf5plugin ; extra == 'all' + requires_python: '>=3.10' +- pypi: https://files.pythonhosted.org/packages/a0/e3/913ca30973886fa9ef5f1a78cf6a5a206ee76d8da2033ad67c4b8beec6b2/pyfai-2026.3.0-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl + name: pyfai + version: 2026.3.0 + sha256: 3aa6d4f1a07766fcf4d768f6116edef805fa5a5181762617284a0d5df8fdcba6 + requires_dist: + - numpy>=1.10 + - h5py + - fabio + - silx>=2 + - numexpr!=2.8.6 + - scipy + - matplotlib + - pyside6 ; extra == 'gui' + - pyopencl ; extra == 'opencl' + - pyside6 ; extra == 'all' + - pyopencl ; extra == 'all' + - hdf5plugin ; extra == 'all' + requires_python: '>=3.10' +- conda: https://conda.anaconda.org/conda-forge/noarch/pyflakes-3.4.0-pyhd8ed1ab_0.conda + sha256: 4b6fb3f7697b4e591c06149671699777c71ca215e9ec16d5bd0767425e630d65 + md5: dba204e749e06890aeb3756ef2b1bf35 + depends: + - python >=3.9 + license: MIT + license_family: MIT + purls: + - pkg:pypi/pyflakes?source=hash-mapping + size: 59592 + timestamp: 1750492011671 +- conda: 
https://conda.anaconda.org/conda-forge/noarch/pygments-2.20.0-pyhd8ed1ab_0.conda + sha256: cf70b2f5ad9ae472b71235e5c8a736c9316df3705746de419b59d442e8348e86 + md5: 16c18772b340887160c79a6acc022db0 + depends: + - python >=3.10 + license: BSD-2-Clause + license_family: BSD + purls: + - pkg:pypi/pygments?source=compressed-mapping + size: 893031 + timestamp: 1774796815820 +- conda: https://conda.anaconda.org/conda-forge/linux-64/pymongo-4.16.0-py310hea6c23e_0.conda + sha256: 68b47893ea255cff606610900014cd284dc7b85a1e3e5f9beaad4ba730921b0e + md5: 422b36ae62da3dde3bb9ffea409502d2 + depends: + - __glibc >=2.17,<3.0.a0 + - dnspython <3.0.0,>=2.6.1 + - libgcc >=14 + - libstdcxx >=14 + - python >=3.10,<3.11.0a0 + - python_abi 3.10.* *_cp310 + license: Apache-2.0 + license_family: APACHE + purls: + - pkg:pypi/pymongo?source=hash-mapping + size: 2186657 + timestamp: 1768121025700 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/pymongo-4.16.0-py310h8616463_0.conda + sha256: 1e6bcf503324ea09399864bd930efcbc236bc86d87ec3c70300dff0b01e1aa64 + md5: c0ae4c240774ce923db763c966953120 + depends: + - __osx >=11.0 + - dnspython <3.0.0,>=2.6.1 + - libcxx >=19 + - python >=3.10,<3.11.0a0 + - python >=3.10,<3.11.0a0 *_cpython + - python_abi 3.10.* *_cp310 + license: Apache-2.0 + license_family: APACHE + purls: + - pkg:pypi/pymongo?source=hash-mapping + size: 2162823 + timestamp: 1768121465800 +- conda: https://conda.anaconda.org/conda-forge/noarch/pyparsing-3.3.2-pyhcf101f3_0.conda + sha256: 417fba4783e528ee732afa82999300859b065dc59927344b4859c64aae7182de + md5: 3687cc0b82a8b4c17e1f0eb7e47163d5 + depends: + - python >=3.10 + - python + license: MIT + license_family: MIT + purls: + - pkg:pypi/pyparsing?source=compressed-mapping + size: 110893 + timestamp: 1769003998136 +- pypi: https://files.pythonhosted.org/packages/11/64/42ec1b0bd72d87f87bde6ceb6869f444d91a2d601f2e67cd05febc0346a1/PyQt5-5.15.11-cp38-abi3-macosx_11_0_arm64.whl + name: pyqt5 + version: 5.15.11 + sha256: 
c8b03dd9380bb13c804f0bdb0f4956067f281785b5e12303d529f0462f9afdc2 + requires_dist: + - pyqt5-sip>=12.15,<13 + - pyqt5-qt5>=5.15.2,<5.16.0 + requires_python: '>=3.8' +- pypi: https://files.pythonhosted.org/packages/b4/8c/4065950f9d013c4b2e588fe33cf04e564c2322842d84dbcbce5ba1dc28b0/PyQt5-5.15.11-cp38-abi3-manylinux_2_17_x86_64.whl + name: pyqt5 + version: 5.15.11 + sha256: cd672a6738d1ae33ef7d9efa8e6cb0a1525ecf53ec86da80a9e1b6ec38c8d0f1 + requires_dist: + - pyqt5-sip>=12.15,<13 + - pyqt5-qt5>=5.15.2,<5.16.0 + requires_python: '>=3.8' +- pypi: https://files.pythonhosted.org/packages/24/8e/76366484d9f9dbe28e3bdfc688183433a7b82e314216e9b14c89e5fab690/pyqt5_qt5-5.15.18-py3-none-macosx_11_0_arm64.whl + name: pyqt5-qt5 + version: 5.15.18 + sha256: c656af9c1e6aaa7f59bf3d8995f2fa09adbf6762b470ed284c31dca80d686a26 +- pypi: https://files.pythonhosted.org/packages/9a/46/ffe177f99f897a59dc237a20059020427bd2d3853d713992b8081933ddfe/pyqt5_qt5-5.15.18-py3-none-manylinux2014_x86_64.whl + name: pyqt5-qt5 + version: 5.15.18 + sha256: bf2457e6371969736b4f660a0c153258fa03dbc6a181348218e6f05421682af7 +- pypi: https://files.pythonhosted.org/packages/2c/8e/9db5c0756134a6501ecf7d472a92f64d2d0b8033b4597e73f138a4cfb605/pyqt5_sip-12.18.0-cp310-cp310-macosx_10_9_universal2.whl + name: pyqt5-sip + version: 12.18.0 + sha256: dc5c30bb9bd8cca6e13d2fedac6e2a75fe2168d798d66f76e74835546b025027 + requires_python: '>=3.10' +- pypi: https://files.pythonhosted.org/packages/6c/4a/c66dfd090d93ef6f3e30093b55bf786c24608258d157cb1951e28c5a2725/pyqt5_sip-12.18.0-cp310-cp310-manylinux1_x86_64.manylinux_2_5_x86_64.whl + name: pyqt5-sip + version: 12.18.0 + sha256: b4b743566f67cabfa7b0ba2da30b46d9d635d16cd5bc8cdcec83f620ed14dd93 + requires_python: '>=3.10' +- conda: https://conda.anaconda.org/conda-forge/linux-64/pyside6-6.9.2-py310h2007e60_2.conda + sha256: cf71f4e2889e243f41a7654298d45e96f5550576b9249965c7b8b0e9ef284ce0 + md5: 6a67f07cbb3a16ffc4ba98fa14066c0e + depends: + - __glibc >=2.17,<3.0.a0 + - libclang13 
>=21.1.2 + - libegl >=1.7.0,<2.0a0 + - libgcc >=14 + - libgl >=1.7.0,<2.0a0 + - libopengl >=1.7.0,<2.0a0 + - libstdcxx >=14 + - libvulkan-loader >=1.4.313.0,<2.0a0 + - libxml2 + - libxml2-16 >=2.14.6 + - libxslt >=1.1.43,<2.0a0 + - python >=3.10,<3.11.0a0 + - python_abi 3.10.* *_cp310 + - qt6-main 6.9.2.* + - qt6-main >=6.9.2,<6.10.0a0 + license: LGPL-3.0-only + license_family: LGPL + purls: + - pkg:pypi/pyside6?source=hash-mapping + - pkg:pypi/shiboken6?source=hash-mapping + size: 10116487 + timestamp: 1758935955533 +- conda: https://conda.anaconda.org/conda-forge/noarch/pysocks-1.7.1-pyha55dd90_7.conda + sha256: ba3b032fa52709ce0d9fd388f63d330a026754587a2f461117cac9ab73d8d0d8 + md5: 461219d1a5bd61342293efa2c0c90eac + depends: + - __unix + - python >=3.9 + license: BSD-3-Clause + license_family: BSD + purls: + - pkg:pypi/pysocks?source=hash-mapping + size: 21085 + timestamp: 1733217331982 +- conda: https://conda.anaconda.org/conda-forge/noarch/pytest-9.0.3-pyhc364b38_1.conda + sha256: 960f59442173eee0731906a9077bd5ccf60f4b4226f05a22d1728ab9a21a879c + md5: 6a991452eadf2771952f39d43615bb3e + depends: + - colorama >=0.4 + - pygments >=2.7.2 + - python >=3.10 + - iniconfig >=1.0.1 + - packaging >=22 + - pluggy >=1.5,<2 + - tomli >=1 + - exceptiongroup >=1 + - python + constrains: + - pytest-faulthandler >=2 + license: MIT + license_family: MIT + purls: + - pkg:pypi/pytest?source=compressed-mapping + size: 299984 + timestamp: 1775644472530 +- conda: https://conda.anaconda.org/conda-forge/linux-64/python-3.10.20-h3c07f61_0_cpython.conda + sha256: 8ff2ce308faf2588b69c65b120293f59a8f2577b772b34df4e817d220b09e081 + md5: 5d4e2b00d99feacd026859b7fa239dc0 + depends: + - __glibc >=2.17,<3.0.a0 + - bzip2 >=1.0.8,<2.0a0 + - ld_impl_linux-64 >=2.36.1 + - libexpat >=2.7.4,<3.0a0 + - libffi >=3.4,<4.0a0 + - libgcc >=14 + - liblzma >=5.8.2,<6.0a0 + - libnsl >=2.0.1,<2.1.0a0 + - libsqlite >=3.51.2,<4.0a0 + - libuuid >=2.41.3,<3.0a0 + - libxcrypt >=4.4.36 + - libzlib >=1.3.1,<2.0a0 + 
- ncurses >=6.5,<7.0a0 + - openssl >=3.5.5,<4.0a0 + - readline >=8.3,<9.0a0 + - tk >=8.6.13,<8.7.0a0 + - tzdata + constrains: + - python_abi 3.10.* *_cp310 + license: Python-2.0 + purls: [] + size: 25455342 + timestamp: 1772729810280 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/python-3.10.20-h1b19095_0_cpython.conda + sha256: f0f6fcbb6cfdee5a6b9c03b5b94d2bbe737f3b17a618006c7685cc48992ae667 + md5: 55ec25b0d09379eb11c32dbe09ee28c4 + depends: + - __osx >=11.0 + - bzip2 >=1.0.8,<2.0a0 + - libexpat >=2.7.4,<3.0a0 + - libffi >=3.4,<4.0a0 + - liblzma >=5.8.2,<6.0a0 + - libsqlite >=3.51.2,<4.0a0 + - libzlib >=1.3.1,<2.0a0 + - ncurses >=6.5,<7.0a0 + - openssl >=3.5.5,<4.0a0 + - readline >=8.3,<9.0a0 + - tk >=8.6.13,<8.7.0a0 + - tzdata + constrains: + - python_abi 3.10.* *_cp310 + license: Python-2.0 + purls: [] + size: 12468674 + timestamp: 1772730636766 +- conda: https://conda.anaconda.org/conda-forge/linux-64/python-blosc2-2.7.1-py310h8713f2e_1.conda + sha256: b3d4f31aa2e6000198b14dd62cdd7cfa04e4d0dbe16238b807655416059da910 + md5: 9327d379a776d96179c33a42b1574508 + depends: + - __glibc >=2.17,<3.0.a0 + - c-blosc2 >=2.15.1,<2.16.0a0 + - libgcc >=13 + - libstdcxx >=13 + - msgpack-python + - ndindex >=1.4 + - numexpr + - numpy >=1.19,<3 + - numpy >=1.20.3 + - py-cpuinfo + - python >=3.10,<3.11.0a0 + - python_abi 3.10.* *_cp310 + license: BSD-3-Clause + license_family: BSD + purls: + - pkg:pypi/blosc2?source=hash-mapping + size: 357195 + timestamp: 1732718590813 +- conda: https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.9.0.post0-pyhe01879c_2.conda + sha256: d6a17ece93bbd5139e02d2bd7dbfa80bee1a4261dced63f65f679121686bf664 + md5: 5b8d21249ff20967101ffa321cab24e8 + depends: + - python >=3.9 + - six >=1.5 + - python + license: Apache-2.0 + license_family: APACHE + purls: + - pkg:pypi/python-dateutil?source=hash-mapping + size: 233310 + timestamp: 1751104122689 +- conda: 
https://conda.anaconda.org/conda-forge/noarch/python-discovery-1.2.2-pyhcf101f3_0.conda + sha256: 498ad019d75ba31c7891dc6d9efc8a7ed48cd5d5973f3a9377eb1b174577d3db + md5: feb2e11368da12d6ce473b6573efab41 + depends: + - python >=3.10 + - filelock >=3.15.4 + - platformdirs <5,>=4.3.6 + - python + license: MIT + license_family: MIT + purls: + - pkg:pypi/python-discovery?source=hash-mapping + size: 34341 + timestamp: 1775586706825 +- conda: https://conda.anaconda.org/conda-forge/noarch/python-dotenv-1.2.2-pyhcf101f3_0.conda + sha256: 74e417a768f59f02a242c25e7db0aa796627b5bc8c818863b57786072aeb85e5 + md5: 130584ad9f3a513cdd71b1fdc1244e9c + depends: + - python >=3.10 + license: BSD-3-Clause + license_family: BSD + purls: + - pkg:pypi/python-dotenv?source=compressed-mapping + size: 27848 + timestamp: 1772388605021 +- conda: https://conda.anaconda.org/conda-forge/linux-64/python-duckdb-1.3.2-py310hea6c23e_0.conda + sha256: bf71253407d1d2be05e29254bb492059808df988cb536c304feb130f84df6d8d + md5: cfa76039ed2d0af6ec4e23f7e4429c18 + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=14 + - libstdcxx >=14 + - python >=3.10,<3.11.0a0 + - python_abi 3.10.* *_cp310 + license: MIT + license_family: MIT + purls: + - pkg:pypi/duckdb?source=hash-mapping + size: 24456144 + timestamp: 1752086885131 +- conda: https://conda.anaconda.org/conda-forge/noarch/python-fastjsonschema-2.21.2-pyhe01879c_0.conda + sha256: df9aa74e9e28e8d1309274648aac08ec447a92512c33f61a8de0afa9ce32ebe8 + md5: 23029aae904a2ba587daba708208012f + depends: + - python >=3.9 + - python + license: BSD-3-Clause + license_family: BSD + purls: + - pkg:pypi/fastjsonschema?source=hash-mapping + size: 244628 + timestamp: 1755304154927 +- conda: https://conda.anaconda.org/conda-forge/noarch/python-gil-3.10.20-hd8ed1ab_0.conda + sha256: 091050470b40e33ba730455f5be1a0780f62070d254ad9378a3778936aacfae8 + md5: f1bbb89516401b50c98046ec41ece796 + depends: + - cpython 3.10.20.* + - python_abi * *_cp310 + license: Python-2.0 + purls: [] + 
size: 50927 + timestamp: 1772728963639 +- conda: https://conda.anaconda.org/conda-forge/noarch/python-jose-3.5.0-pyhff2d567_0.conda + sha256: 785a3be2b9ce6d2f2f480bf1805c737f17e84c7e6382162eb83aea7d19089b87 + md5: 1b8523e5a0a5809e42c0f53a648efb28 + depends: + - cryptography >=3.4.0 + - ecdsa !=0.15 + - pyasn1 >=0.5.0 + - python >=3.9 + - rsa >=4.0,<5.0,!=4.4,!=4.1.1 + license: MIT + license_family: MIT + purls: + - pkg:pypi/python-jose?source=hash-mapping + size: 76008 + timestamp: 1748530600158 +- conda: https://conda.anaconda.org/conda-forge/noarch/python-multipart-0.0.26-pyhcf101f3_0.conda + sha256: 2c683f35fac0e6d5c314872bbe89f1ff9100e93a6ed3c2c249b5ba7f95baa139 + md5: 8fe038d7eacf6aa4e0885f6ae9c560e5 + depends: + - python >=3.10 + - python + license: Apache-2.0 + purls: + - pkg:pypi/python-multipart?source=hash-mapping + size: 37352 + timestamp: 1775852154703 +- conda: https://conda.anaconda.org/conda-forge/noarch/python-tzdata-2026.1-pyhd8ed1ab_0.conda + sha256: b5494ef54bc2394c6c4766ceeafac22507c4fc60de6cbfda45524fc2fcc3c9fc + md5: d8d30923ccee7525704599efd722aa16 + depends: + - python >=3.10 + license: Apache-2.0 + license_family: APACHE + purls: + - pkg:pypi/tzdata?source=compressed-mapping + size: 147315 + timestamp: 1775223532978 +- conda: https://conda.anaconda.org/conda-forge/noarch/python_abi-3.10-8_cp310.conda + build_number: 8 + sha256: 7ad76fa396e4bde336872350124c0819032a9e8a0a40590744ff9527b54351c1 + md5: 05e00f3b21e88bb3d658ac700b2ce58c + constrains: + - python 3.10.* *_cpython + license: BSD-3-Clause + license_family: BSD + purls: [] + size: 6999 + timestamp: 1752805924192 +- conda: https://conda.anaconda.org/conda-forge/linux-64/pytokens-0.4.1-py310h139afa4_1.conda + sha256: bba4018e01f174aeb85caf951be1773ae51e0fa4f10ca25b0ef49ae661c7f1cd + md5: 991ebcb767bdca1d395249d6c82601f6 + depends: + - python + - __glibc >=2.17,<3.0.a0 + - libgcc >=14 + - python_abi 3.10.* *_cp310 + license: MIT + license_family: MIT + purls: + - 
pkg:pypi/pytokens?source=hash-mapping + size: 261963 + timestamp: 1771613610705 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/pytokens-0.4.1-py310haea493c_1.conda + sha256: fefcfc92b26310d581c035f786faef80376540cdbcce1efc8600dc5afa0d8788 + md5: c05e879c10805b1d1f9ee616e7f246da + depends: + - python + - __osx >=11.0 + - python 3.10.* *_cpython + - python_abi 3.10.* *_cp310 + license: MIT + license_family: MIT + purls: + - pkg:pypi/pytokens?source=hash-mapping + size: 159851 + timestamp: 1771613714816 +- conda: https://conda.anaconda.org/conda-forge/noarch/pytz-2026.1.post1-pyhcf101f3_0.conda + sha256: d35c15c861d5635db1ba847a2e0e7de4c01994999602db1f82e41b5935a9578a + md5: f8a489f43a1342219a3a4d69cecc6b25 + depends: + - python >=3.10 + - python + license: MIT + license_family: MIT + purls: + - pkg:pypi/pytz?source=compressed-mapping + size: 201725 + timestamp: 1773679724369 +- conda: https://conda.anaconda.org/conda-forge/linux-64/pywavelets-1.8.0-py310hf462985_0.conda + sha256: f23e0b5432c6d338876eca664deeb360949062ce026ddb65bcb1f31643452354 + md5: 4c441eff2be2e65bd67765c5642051c5 + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=13 + - numpy >=1.19,<3 + - numpy >=1.23,<3 + - python >=3.10,<3.11.0a0 + - python_abi 3.10.* *_cp310 + license: MIT + license_family: MIT + purls: + - pkg:pypi/pywavelets?source=hash-mapping + size: 3689433 + timestamp: 1733419497834 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/pywavelets-1.8.0-py310hc12b6d3_0.conda + sha256: 8903f186a2660042fa3d5656125f35e4530a444942c1854e60d8e82ae858a58b + md5: 221075503af39e63f6124873908ad7f0 + depends: + - __osx >=11.0 + - numpy >=1.19,<3 + - numpy >=1.23,<3 + - python >=3.10,<3.11.0a0 + - python >=3.10,<3.11.0a0 *_cpython + - python_abi 3.10.* *_cp310 + license: MIT + license_family: MIT + purls: + - pkg:pypi/pywavelets?source=hash-mapping + size: 3609933 + timestamp: 1733419504479 +- conda: https://conda.anaconda.org/conda-forge/linux-64/pyyaml-6.0.3-py310h3406613_1.conda + 
sha256: f23de6cc72541c6081d3d27482dbc9fc5dd03be93126d9155f06d0cf15d6e90e + md5: 2160894f57a40d2d629a34ee8497795f + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=14 + - python >=3.10,<3.11.0a0 + - python_abi 3.10.* *_cp310 + - yaml >=0.2.5,<0.3.0a0 + license: MIT + license_family: MIT + purls: + - pkg:pypi/pyyaml?source=hash-mapping + size: 176522 + timestamp: 1770223379599 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/pyyaml-6.0.3-py310hb46c203_1.conda + sha256: 22f0c040a56bfdb9dfa2072129b67db3f8bf738e52b243573316443d1da853a8 + md5: cdd081d256a691c8adc3cffad215988c + depends: + - __osx >=11.0 + - python >=3.10,<3.11.0a0 + - python >=3.10,<3.11.0a0 *_cpython + - python_abi 3.10.* *_cp310 + - yaml >=0.2.5,<0.3.0a0 + license: MIT + license_family: MIT + purls: + - pkg:pypi/pyyaml?source=hash-mapping + size: 163966 + timestamp: 1770223747482 +- conda: https://conda.anaconda.org/conda-forge/linux-64/qhull-2020.2-h434a139_5.conda + sha256: 776363493bad83308ba30bcb88c2552632581b143e8ee25b1982c8c743e73abc + md5: 353823361b1d27eb3960efb076dfcaf6 + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc-ng >=12 + - libstdcxx-ng >=12 + license: LicenseRef-Qhull + purls: [] + size: 552937 + timestamp: 1720813982144 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/qhull-2020.2-h420ef59_5.conda + sha256: 873ac689484262a51fd79bc6103c1a1bedbf524924d7f0088fb80703042805e4 + md5: 6483b1f59526e05d7d894e466b5b6924 + depends: + - __osx >=11.0 + - libcxx >=16 + license: LicenseRef-Qhull + purls: [] + size: 516376 + timestamp: 1720814307311 +- conda: https://conda.anaconda.org/conda-forge/linux-64/qt6-main-6.9.2-h994258b_2.conda + sha256: 3976b81325db27edb335df3d500b1f9e82848efe3ffded9816e0ef572001866c + md5: 0a9425304ca2409b783f39dec84a8526 + depends: + - __glibc >=2.17,<3.0.a0 + - alsa-lib >=1.2.14,<1.3.0a0 + - dbus >=1.16.2,<2.0a0 + - double-conversion >=3.3.1,<3.4.0a0 + - fontconfig >=2.15.0,<3.0a0 + - fonts-conda-ecosystem + - harfbuzz >=11.5.0 + - icu >=75.1,<76.0a0 
+ - krb5 >=1.21.3,<1.22.0a0 + - libclang-cpp21.1 >=21.1.1,<21.2.0a0 + - libclang13 >=21.1.1 + - libcups >=2.3.3,<2.4.0a0 + - libdrm >=2.4.125,<2.5.0a0 + - libegl >=1.7.0,<2.0a0 + - libfreetype >=2.14.1 + - libfreetype6 >=2.14.1 + - libgcc >=14 + - libgl >=1.7.0,<2.0a0 + - libglib >=2.86.0,<3.0a0 + - libjpeg-turbo >=3.1.0,<4.0a0 + - libllvm21 >=21.1.1,<21.2.0a0 + - libpng >=1.6.50,<1.7.0a0 + - libpq >=17.6,<18.0a0 + - libsqlite >=3.50.4,<4.0a0 + - libstdcxx >=14 + - libtiff >=4.7.0,<4.8.0a0 + - libwebp-base >=1.6.0,<2.0a0 + - libxcb >=1.17.0,<2.0a0 + - libxkbcommon >=1.11.0,<2.0a0 + - libxml2 + - libxml2-16 >=2.14.6 + - libzlib >=1.3.1,<2.0a0 + - openssl >=3.5.2,<4.0a0 + - pcre2 >=10.46,<10.47.0a0 + - wayland >=1.24.0,<2.0a0 + - xcb-util >=0.4.1,<0.5.0a0 + - xcb-util-cursor >=0.1.5,<0.2.0a0 + - xcb-util-image >=0.4.0,<0.5.0a0 + - xcb-util-keysyms >=0.4.1,<0.5.0a0 + - xcb-util-renderutil >=0.3.10,<0.4.0a0 + - xcb-util-wm >=0.4.2,<0.5.0a0 + - xorg-libice >=1.1.2,<2.0a0 + - xorg-libsm >=1.2.6,<2.0a0 + - xorg-libx11 >=1.8.12,<2.0a0 + - xorg-libxcomposite >=0.4.6,<1.0a0 + - xorg-libxcursor >=1.2.3,<2.0a0 + - xorg-libxdamage >=1.1.6,<2.0a0 + - xorg-libxext >=1.3.6,<2.0a0 + - xorg-libxrandr >=1.5.4,<2.0a0 + - xorg-libxtst >=1.2.5,<2.0a0 + - xorg-libxxf86vm >=1.1.6,<2.0a0 + - zstd >=1.5.7,<1.6.0a0 + constrains: + - qt 6.9.2 + license: LGPL-3.0-only + license_family: LGPL + purls: [] + size: 52549323 + timestamp: 1758022799077 +- conda: https://conda.anaconda.org/conda-forge/linux-64/rav1e-0.8.1-h1fbca29_0.conda + sha256: cf550bbc8e5ebedb6dba9ccaead3e07bd1cb86b183644a4c853e06e4b3ad5ac7 + md5: d83958768626b3c8471ce032e28afcd3 + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=14 + constrains: + - __glibc >=2.17 + license: BSD-2-Clause + license_family: BSD + purls: [] + size: 5595970 + timestamp: 1772540833621 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/rav1e-0.8.1-h8246384_0.conda + sha256: 925e35b71fe513e0380ecd2fe137e3f4f248bf7ce4bad96946c7c704b7a50d26 + 
md5: 4706a8a71474c692482c3f86c2175454 + depends: + - __osx >=11.0 + constrains: + - __osx >=11.0 + license: BSD-2-Clause + license_family: BSD + purls: [] + size: 886953 + timestamp: 1772541394570 +- conda: https://conda.anaconda.org/conda-forge/linux-64/re2-2025.11.05-h5301d42_0.conda + sha256: 2f225ddf4a274743045aded48053af65c31721e797a45beed6774fdc783febfb + md5: 0227d04521bc3d28c7995c7e1f99a721 + depends: + - libre2-11 2025.11.05 h7b12aa8_0 + license: BSD-3-Clause + license_family: BSD + purls: [] + size: 27316 + timestamp: 1762397780316 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/re2-2025.11.05-h64b956e_0.conda + sha256: 29c4bceb6b4530bac6820c30ba5a2f53fd26ed3e7003831ecf394e915b975fbc + md5: 1b35e663ed321840af65e7c5cde419f2 + depends: + - libre2-11 2025.11.05 h91c62da_0 + license: BSD-3-Clause + license_family: BSD + purls: [] + size: 27422 + timestamp: 1762398340843 +- conda: https://conda.anaconda.org/conda-forge/linux-64/readline-8.3-h853b02a_0.conda + sha256: 12ffde5a6f958e285aa22c191ca01bbd3d6e710aa852e00618fa6ddc59149002 + md5: d7d95fc8287ea7bf33e0e7116d2b95ec + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=14 + - ncurses >=6.5,<7.0a0 + license: GPL-3.0-only + license_family: GPL + purls: [] + size: 345073 + timestamp: 1765813471974 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/readline-8.3-h46df422_0.conda + sha256: a77010528efb4b548ac2a4484eaf7e1c3907f2aec86123ed9c5212ae44502477 + md5: f8381319127120ce51e081dce4865cf4 + depends: + - __osx >=11.0 + - ncurses >=6.5,<7.0a0 + license: GPL-3.0-only + license_family: GPL + purls: [] + size: 313930 + timestamp: 1765813902568 +- conda: https://conda.anaconda.org/conda-forge/noarch/readme_renderer-44.0-pyhd8ed1ab_1.conda + sha256: 66f3adf6aaabf977cfcc22cb65607002b1de4a22bc9fac7be6bb774bc6f85a3a + md5: c58dd5d147492671866464405364c0f1 + depends: + - cmarkgfm >=0.8.0 + - docutils >=0.21.2 + - nh3 >=0.2.14 + - pygments >=2.5.1 + - python >=3.9 + license: Apache-2.0 + license_family: 
APACHE + purls: + - pkg:pypi/readme-renderer?source=hash-mapping + size: 17481 + timestamp: 1734339765256 +- conda: https://conda.anaconda.org/conda-forge/noarch/redis-py-7.4.0-pyhd8ed1ab_0.conda + sha256: ec4ea5805d1d845159e433c72e04bf9ecc4bb3977285b59006071258a5b6f648 + md5: f03076f8b769d63b42018c739b9d65ee + depends: + - async-timeout >=4.0.3 + - python >=3.10 + license: MIT + license_family: MIT + purls: + - pkg:pypi/redis?source=compressed-mapping + size: 263076 + timestamp: 1774356425360 +- conda: https://conda.anaconda.org/conda-forge/noarch/referencing-0.37.0-pyhcf101f3_0.conda + sha256: 0577eedfb347ff94d0f2fa6c052c502989b028216996b45c7f21236f25864414 + md5: 870293df500ca7e18bedefa5838a22ab + depends: + - attrs >=22.2.0 + - python >=3.10 + - rpds-py >=0.7.0 + - typing_extensions >=4.4.0 + - python + license: MIT + license_family: MIT + purls: + - pkg:pypi/referencing?source=hash-mapping + size: 51788 + timestamp: 1760379115194 +- conda: https://conda.anaconda.org/conda-forge/noarch/reportlab-4.4.10-pyhcf101f3_1.conda + sha256: 61c9a2a7b4decaa300e5076ccdea874aad77ff13a55b7fd9b95744dba060eec9 + md5: c89e6c0a10804fb87536a3980685990d + depends: + - charset-normalizer + - pillow >=9 + - python >=3.9,<4 + - rlpycairo >=0.2.0,<1 + - freetype-py >=2.3.0,<2.4 + - python + license: BSD-3-Clause + license_family: BSD + purls: + - pkg:pypi/reportlab?source=hash-mapping + size: 1718577 + timestamp: 1771171345253 +- conda: https://conda.anaconda.org/conda-forge/noarch/requests-2.33.1-pyhcf101f3_0.conda + sha256: c0249bc4bf4c0e8e06d0e7b4d117a5d593cc4ab2144d5006d6d47c83cb0af18e + md5: 10afbb4dbf06ff959ad25a92ccee6e59 + depends: + - python >=3.10 + - certifi >=2023.5.7 + - charset-normalizer >=2,<4 + - idna >=2.5,<4 + - urllib3 >=1.26,<3 + - python + constrains: + - chardet >=3.0.2,<6 + license: Apache-2.0 + license_family: APACHE + purls: + - pkg:pypi/requests?source=compressed-mapping + size: 63712 + timestamp: 1774894783063 +- conda: 
https://conda.anaconda.org/conda-forge/noarch/requests-toolbelt-1.0.0-pyhd8ed1ab_1.conda + sha256: c0b815e72bb3f08b67d60d5e02251bbb0164905b5f72942ff5b6d2a339640630 + md5: 66de8645e324fda0ea6ef28c2f99a2ab + depends: + - python >=3.9 + - requests >=2.0.1,<3.0.0 + license: Apache-2.0 + license_family: APACHE + purls: + - pkg:pypi/requests-toolbelt?source=hash-mapping + size: 44285 + timestamp: 1733734886897 +- conda: https://conda.anaconda.org/conda-forge/noarch/rfc3986-2.0.0-pyhd8ed1ab_1.conda + sha256: d617373ba1a5108336cb87754d030b9e384dcf91796d143fa60fe61e76e5cfb0 + md5: 43e14f832d7551e5a8910672bfc3d8c6 + depends: + - python >=3.9 + license: Apache-2.0 + license_family: APACHE + purls: + - pkg:pypi/rfc3986?source=hash-mapping + size: 38028 + timestamp: 1733921806657 +- conda: https://conda.anaconda.org/conda-forge/noarch/rich-15.0.0-pyhcf101f3_0.conda + sha256: 3d6ba2c0fcdac3196ba2f0615b4104e532525ffa1335b50a2878be5ff488814a + md5: 0242025a3c804966bf71aa04eee82f66 + depends: + - markdown-it-py >=2.2.0 + - pygments >=2.13.0,<3.0.0 + - python >=3.10 + - typing_extensions >=4.0.0,<5.0.0 + - python + license: MIT + purls: + - pkg:pypi/rich?source=hash-mapping + size: 208577 + timestamp: 1775991661559 +- conda: https://conda.anaconda.org/conda-forge/noarch/rich-toolkit-0.19.7-pyhcf101f3_0.conda + sha256: 9cf3b9a083ebdee70ef5a48fbe409d91d2a8c4eed3c581a7b33b4d5ca7c813be + md5: 8b1a4d854f9a4ea1e4abc93ccab0ded9 + depends: + - python >=3.10 + - rich >=13.7.1 + - click >=8.1.7 + - typing_extensions >=4.12.2 + - python + license: MIT + license_family: MIT + purls: + - pkg:pypi/rich-toolkit?source=compressed-mapping + size: 32484 + timestamp: 1771977622605 +- conda: https://conda.anaconda.org/conda-forge/noarch/rlpycairo-0.4.0-pyh6c17108_0.conda + sha256: 8d993b1a7d869855a1f6358dcc3de08dbeda9263d8c852d44bfc3900701c1e6c + md5: cc70086eaf08be7f62fd44842c013916 + depends: + - freetype-py >=2.3 + - pycairo >=1.20.0 + - python >=3.10 + license: BSD-3-Clause + license_family: BSD + 
purls: + - pkg:pypi/rlpycairo?source=hash-mapping + size: 15558 + timestamp: 1756864268077 +- conda: https://conda.anaconda.org/conda-forge/linux-64/rpds-py-0.30.0-py310hd8f68c5_0.conda + sha256: ac1132a9344c77e19bbbdb966668cf73a861ceec7b075858a52c8e961fb8ea9d + md5: 61ff3f8e00c63bb66903636d0197e962 + depends: + - python + - libgcc >=14 + - __glibc >=2.17,<3.0.a0 + - python_abi 3.10.* *_cp310 + constrains: + - __glibc >=2.17 + license: MIT + license_family: MIT + purls: + - pkg:pypi/rpds-py?source=hash-mapping + size: 382893 + timestamp: 1764543243162 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/rpds-py-0.30.0-py310hf3301a5_0.conda + sha256: 842bfe772072b6ca1ebc286e9fcbe95717d1528ec921687076d707d02d64fa61 + md5: 38645c71e72b3cc640eb508d05a28d0c + depends: + - python + - python 3.10.* *_cpython + - __osx >=11.0 + - python_abi 3.10.* *_cp310 + constrains: + - __osx >=11.0 + license: MIT + license_family: MIT + purls: + - pkg:pypi/rpds-py?source=hash-mapping + size: 357454 + timestamp: 1764543222201 +- conda: https://conda.anaconda.org/conda-forge/noarch/rsa-4.9.1-pyhd8ed1ab_0.conda + sha256: e32e94e7693d4bc9305b36b8a4ef61034e0428f58850ebee4675978e3c2e5acf + md5: 58958bb50f986ac0c46f73b6e290d5fe + depends: + - pyasn1 >=0.1.3 + - python >=3.9 + license: Apache-2.0 + license_family: APACHE + purls: + - pkg:pypi/rsa?source=hash-mapping + size: 31709 + timestamp: 1744825527634 +- conda: https://conda.anaconda.org/conda-forge/noarch/ruamel.yaml-0.19.1-pyhcf101f3_0.conda + sha256: b48bebe297a63ae60f52e50be328262e880702db4d9b4e86731473ada459c2a1 + md5: 06ad944772941d5dae1e0d09848d8e49 + depends: + - python >=3.10 + - ruamel.yaml.clib >=0.2.15 + - python + license: MIT + license_family: MIT + purls: + - pkg:pypi/ruamel-yaml?source=hash-mapping + size: 98448 + timestamp: 1767538149184 +- conda: https://conda.anaconda.org/conda-forge/linux-64/ruamel.yaml.clib-0.2.15-py310h139afa4_1.conda + sha256: 242ff560883541acc447b4fb11f1c6c0a4e91479b70c8ce895aee5d9a8ce346a + 
md5: a7e3055859e9162d5f7adb9b3c229d56 + depends: + - python + - __glibc >=2.17,<3.0.a0 + - libgcc >=14 + - python_abi 3.10.* *_cp310 + license: MIT + license_family: MIT + purls: + - pkg:pypi/ruamel-yaml-clib?source=hash-mapping + size: 152839 + timestamp: 1766159514181 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/ruamel.yaml.clib-0.2.15-py310haea493c_1.conda + sha256: 1ce4e1e335f418d02dc2c695e276535eedf42eba061cff3668812f3e11e4e89a + md5: bc5a718669756fbadccddbfe591579f8 + depends: + - python + - python 3.10.* *_cpython + - __osx >=11.0 + - python_abi 3.10.* *_cp310 + license: MIT + license_family: MIT + purls: + - pkg:pypi/ruamel-yaml-clib?source=hash-mapping + size: 133365 + timestamp: 1766159575543 +- conda: https://conda.anaconda.org/conda-forge/linux-64/s2n-1.6.0-h8399546_1.conda + sha256: f5b294ce9b40d15a4bc31b315364459c0d702dd3e8751fe8735c88ac6a9ddc67 + md5: 8dbc626b1b11e7feb40a14498567b954 + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=14 + - openssl >=3.5.4,<4.0a0 + license: Apache-2.0 + license_family: Apache + purls: [] + size: 393615 + timestamp: 1762176592236 +- pypi: git+https://github.com/scikit-beam/scikit-beam?rev=main#dbe344435f6b12749104b868f7e251624acee565 + name: scikit-beam + version: 0.0.27.post1+gdbe3444 + requires_dist: + - fabio + - lmfit + - numpy>=1.15 + - pyfai + - scikit-image + - scipy + - six +- conda: https://conda.anaconda.org/conda-forge/linux-64/scikit-image-0.25.2-py310h0158d43_2.conda + sha256: 34593b03ba5de16ce231a6485caa295fbdca251a8cb3585ec5db1ffe9df6b063 + md5: e8e3404c2d4135193013fbbe9bba60a5 + depends: + - __glibc >=2.17,<3.0.a0 + - imageio >=2.33,!=2.35.0 + - lazy-loader >=0.4 + - libgcc >=14 + - libstdcxx >=14 + - networkx >=3.0 + - numpy >=1.21,<3 + - numpy >=1.24 + - packaging >=21 + - pillow >=10.1 + - python >=3.10,<3.11.0a0 + - python_abi 3.10.* *_cp310 + - pywavelets >=1.6 + - scipy >=1.11.4 + - tifffile >=2022.8.12 + constrains: + - astropy-base >=6.0 + - pywavelets >=1.6 + - scikit-learn >=1.2 
+ - pyamg >=5.2 + - matplotlib-base >=3.7 + - numpy >=1.24 + - dask-core >=2023.2.0,!=2024.8.0 + - pooch >=1.6.0 + license: BSD-3-Clause + license_family: BSD + purls: + - pkg:pypi/scikit-image?source=hash-mapping + size: 10561680 + timestamp: 1757197302869 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/scikit-image-0.25.2-py310h25f4b65_2.conda + sha256: 2b853aa6c2f234dd21c933b367092f105cf61ed6780e4a2152a5cc748efd5db0 + md5: 60e04ee3b55d262d969d78da3aa4fe8c + depends: + - __osx >=11.0 + - imageio >=2.33,!=2.35.0 + - lazy-loader >=0.4 + - libcxx >=19 + - networkx >=3.0 + - numpy >=1.21,<3 + - numpy >=1.24 + - packaging >=21 + - pillow >=10.1 + - python >=3.10,<3.11.0a0 + - python >=3.10,<3.11.0a0 *_cpython + - python_abi 3.10.* *_cp310 + - pywavelets >=1.6 + - scipy >=1.11.4 + - tifffile >=2022.8.12 + constrains: + - scikit-learn >=1.2 + - pywavelets >=1.6 + - numpy >=1.24 + - matplotlib-base >=3.7 + - dask-core >=2023.2.0,!=2024.8.0 + - pyamg >=5.2 + - astropy-base >=6.0 + - pooch >=1.6.0 + license: BSD-3-Clause + license_family: BSD + purls: + - pkg:pypi/scikit-image?source=hash-mapping + size: 9952636 + timestamp: 1757197945646 +- conda: https://conda.anaconda.org/conda-forge/linux-64/scipy-1.15.2-py310h1d65ade_0.conda + sha256: 4cb98641f870666d365594013701d5691205a0fe81ac3ba7778a23b1cc2caa8e + md5: 8c29cd33b64b2eb78597fa28b5595c8d + depends: + - __glibc >=2.17,<3.0.a0 + - libblas >=3.9.0,<4.0a0 + - libcblas >=3.9.0,<4.0a0 + - libgcc >=13 + - libgfortran + - libgfortran5 >=13.3.0 + - liblapack >=3.9.0,<4.0a0 + - libstdcxx >=13 + - numpy <2.5 + - numpy >=1.19,<3 + - numpy >=1.23.5 + - python >=3.10,<3.11.0a0 + - python_abi 3.10.* *_cp310 + license: BSD-3-Clause + license_family: BSD + purls: + - pkg:pypi/scipy?source=hash-mapping + size: 16417101 + timestamp: 1739791865060 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/scipy-1.15.2-py310h32ab4ed_0.conda + sha256: f6ff2c1ba4775300199e8bc0331d2e2ccb5906f58f3835c5426ddc591c9ad7bf + md5: 
a389f540c808b22b3c696d7aea791a41 + depends: + - __osx >=11.0 + - libblas >=3.9.0,<4.0a0 + - libcblas >=3.9.0,<4.0a0 + - libcxx >=18 + - libgfortran >=5 + - libgfortran5 >=13.2.0 + - liblapack >=3.9.0,<4.0a0 + - numpy <2.5 + - numpy >=1.19,<3 + - numpy >=1.23.5 + - python >=3.10,<3.11.0a0 + - python >=3.10,<3.11.0a0 *_cpython + - python_abi 3.10.* *_cp310 + license: BSD-3-Clause + license_family: BSD + purls: + - pkg:pypi/scipy?source=hash-mapping + size: 13507343 + timestamp: 1739792089317 +- conda: https://conda.anaconda.org/conda-forge/linux-64/secretstorage-3.4.1-py310hff52083_0.conda + sha256: c803456ee0099e12808b54d4b8e067c70bc0b1bbfd80f21390fbb4f5886d82c3 + md5: 86838e6b20d008f9039e9fca12a94eba + depends: + - cryptography >=2.0 + - dbus + - jeepney >=0.6 + - python >=3.10,<3.11.0a0 + - python_abi 3.10.* *_cp310 + license: BSD-3-Clause + license_family: BSD + purls: + - pkg:pypi/secretstorage?source=hash-mapping + size: 27851 + timestamp: 1763045364984 +- conda: https://conda.anaconda.org/conda-forge/noarch/sentinels-1.0.0-py_1.tar.bz2 + sha256: 10cf4385de961d6e778a9468c5f65930948c25548e0668799da0ff707b84ebe7 + md5: b1e531273d250d72ad2601c0cfa65d7a + depends: + - python + license: BSD 3-Clause + license_family: BSD + purls: + - pkg:pypi/sentinels?source=hash-mapping + size: 5052 + timestamp: 1535321278716 +- conda: https://conda.anaconda.org/conda-forge/noarch/setuptools-82.0.1-pyh332efcf_0.conda + sha256: 82088a6e4daa33329a30bc26dc19a98c7c1d3f05c0f73ce9845d4eab4924e9e1 + md5: 8e194e7b992f99a5015edbd4ebd38efd + depends: + - python >=3.10 + license: MIT + license_family: MIT + purls: + - pkg:pypi/setuptools?source=compressed-mapping + size: 639697 + timestamp: 1773074868565 +- conda: https://conda.anaconda.org/conda-forge/noarch/setuptools-scm-9.2.2-pyhd8ed1ab_0.conda + sha256: 2161ac35fc22770b248bab0be2cc3b5bd765f528a9e60e7f3be784fd8d0d605a + md5: e2e4d7094d0580ccd62e2a41947444f3 + depends: + - importlib-metadata + - packaging >=20.0 + - python >=3.10 + - 
setuptools >=45 + - tomli >=1.0.0 + - typing-extensions + license: MIT + license_family: MIT + purls: + - pkg:pypi/setuptools-scm?source=hash-mapping + size: 52539 + timestamp: 1760965125925 +- conda: https://conda.anaconda.org/conda-forge/linux-64/shapely-2.1.2-py310hc8bbb35_2.conda + sha256: 292acdbb958db8141f1abc7b7a77970a930a9f73abeb0c6be399c50505c715ab + md5: 0937b0e88a24c82e02d5b714acc9957c + depends: + - __glibc >=2.17,<3.0.a0 + - geos >=3.14.1,<3.14.2.0a0 + - libgcc >=14 + - numpy >=1.21,<3 + - python >=3.10,<3.11.0a0 + - python_abi 3.10.* *_cp310 + license: BSD-3-Clause + license_family: BSD + purls: + - pkg:pypi/shapely?source=hash-mapping + size: 540156 + timestamp: 1762523617913 +- conda: https://conda.anaconda.org/conda-forge/noarch/shellingham-1.5.4-pyhd8ed1ab_2.conda + sha256: 1d6534df8e7924d9087bd388fbac5bd868c5bf8971c36885f9f016da0657d22b + md5: 83ea3a2ddb7a75c1b09cea582aa4f106 + depends: + - python >=3.10 + license: MIT + license_family: MIT + purls: + - pkg:pypi/shellingham?source=hash-mapping + size: 15018 + timestamp: 1762858315311 +- pypi: https://files.pythonhosted.org/packages/5f/bc/6a9aaa3e605cfe7e73856c9067ba42cd6289660eb7a6d31970c7f9cca725/silx-2.2.2-cp310-cp310-macosx_10_9_universal2.whl + name: silx + version: 2.2.2 + sha256: 4eb1f8829323703133b3456cc3c2dd0a08b631b4cf4752bd3cefc2c591ea14ee + requires_dist: + - numpy + - packaging + - h5py + - fabio>=0.9 + - pyopencl ; extra == 'full' + - mako ; extra == 'full' + - qtconsole ; extra == 'full' + - matplotlib>=3.1.0 ; extra == 'full' + - pyopengl ; extra == 'full' + - python-dateutil ; extra == 'full' + - pyqt5 ; extra == 'full' + - hdf5plugin ; extra == 'full' + - scipy ; extra == 'full' + - pillow ; extra == 'full' + - nbsphinx ; extra == 'doc' + - pydata-sphinx-theme ; extra == 'doc' + - sphinx-design ; extra == 'doc' + - sphinx-autodoc-typehints ; extra == 'doc' + - sphinx<8.2 ; extra == 'doc' + - pillow ; extra == 'doc' + - pandoc ; extra == 'doc' + - pytest>=6.0 ; extra == 'test' + - 
pytest-xvfb ; extra == 'test' + - pytest-mock ; extra == 'test' + - scipy>=1.10 ; extra == 'test' + - pooch ; extra == 'test' + requires_python: '>=3.8' +- pypi: https://files.pythonhosted.org/packages/65/8a/2b46cb76762468deea3dbcc5370c858d60e5b7bdaf09bdccd0169707147c/silx-2.2.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl + name: silx + version: 2.2.2 + sha256: ccf4239f9f625d00d69210ccdde27cbed5e6e5f5ffa4e7e2477c4f97c3ce837e + requires_dist: + - numpy + - packaging + - h5py + - fabio>=0.9 + - pyopencl ; extra == 'full' + - mako ; extra == 'full' + - qtconsole ; extra == 'full' + - matplotlib>=3.1.0 ; extra == 'full' + - pyopengl ; extra == 'full' + - python-dateutil ; extra == 'full' + - pyqt5 ; extra == 'full' + - hdf5plugin ; extra == 'full' + - scipy ; extra == 'full' + - pillow ; extra == 'full' + - sphinx-autodoc-typehints ; extra == 'doc' + - sphinx-design ; extra == 'doc' + - pandoc ; extra == 'doc' + - pillow ; extra == 'doc' + - nbsphinx ; extra == 'doc' + - sphinx<8.2 ; extra == 'doc' + - pydata-sphinx-theme ; extra == 'doc' + - pytest>=6.0 ; extra == 'test' + - pytest-xvfb ; extra == 'test' + - pytest-mock ; extra == 'test' + - scipy>=1.10 ; extra == 'test' + - pooch ; extra == 'test' + requires_python: '>=3.8' +- conda: https://conda.anaconda.org/conda-forge/noarch/six-1.17.0-pyhe01879c_1.conda + sha256: 458227f759d5e3fcec5d9b7acce54e10c9e1f4f4b7ec978f3bfd54ce4ee9853d + md5: 3339e3b65d58accf4ca4fb8748ab16b3 + depends: + - python >=3.9 + - python + license: MIT + license_family: MIT + purls: + - pkg:pypi/six?source=hash-mapping + size: 18455 + timestamp: 1753199211006 +- conda: https://conda.anaconda.org/conda-forge/noarch/slicerator-1.1.0-pyhd8ed1ab_1.conda + sha256: 5340c36cb62b7c8a22c267254c037302fea2670a4fb9d29e10ba36565e2a5510 + md5: 102f1100ad3dcbcf57f789600c9c015a + depends: + - python >=3.9 + license: BSD-3-Clause + license_family: BSD + purls: + - pkg:pypi/slicerator?source=hash-mapping + size: 15755 + timestamp: 1734051114500 +- 
conda: https://conda.anaconda.org/conda-forge/linux-64/snappy-1.2.2-h03e3b7b_1.conda + sha256: 48f3f6a76c34b2cfe80de9ce7f2283ecb55d5ed47367ba91e8bb8104e12b8f11 + md5: 98b6c9dc80eb87b2519b97bcf7e578dd + depends: + - libgcc >=14 + - __glibc >=2.17,<3.0.a0 + - libstdcxx >=14 + - libgcc >=14 + license: BSD-3-Clause + license_family: BSD + purls: [] + size: 45829 + timestamp: 1762948049098 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/snappy-1.2.2-hada39a4_1.conda + sha256: cb9305ede19584115f43baecdf09a3866bfcd5bcca0d9e527bd76d9a1dbe2d8d + md5: fca4a2222994acd7f691e57f94b750c5 + depends: + - libcxx >=19 + - __osx >=11.0 + license: BSD-3-Clause + license_family: BSD + purls: [] + size: 38883 + timestamp: 1762948066818 +- conda: https://conda.anaconda.org/conda-forge/noarch/sniffio-1.3.1-pyhd8ed1ab_2.conda + sha256: dce518f45e24cd03f401cb0616917773159a210c19d601c5f2d4e0e5879d30ad + md5: 03fe290994c5e4ec17293cfb6bdce520 + depends: + - python >=3.10 + license: Apache-2.0 + license_family: Apache + purls: + - pkg:pypi/sniffio?source=compressed-mapping + size: 15698 + timestamp: 1762941572482 +- conda: https://conda.anaconda.org/conda-forge/noarch/snowballstemmer-3.0.1-pyhd8ed1ab_0.conda + sha256: 17007a4cfbc564dc3e7310dcbe4932c6ecb21593d4fec3c68610720f19e73fb2 + md5: 755cf22df8693aa0d1aec1c123fa5863 + depends: + - python >=3.9 + license: BSD-3-Clause + license_family: BSD + purls: + - pkg:pypi/snowballstemmer?source=hash-mapping + size: 73009 + timestamp: 1747749529809 +- conda: https://conda.anaconda.org/conda-forge/noarch/sortedcontainers-2.4.0-pyhd8ed1ab_1.conda + sha256: d1e3e06b5cf26093047e63c8cc77b70d970411c5cbc0cb1fad461a8a8df599f7 + md5: 0401a17ae845fa72c7210e206ec5647d + depends: + - python >=3.9 + license: Apache-2.0 + license_family: APACHE + purls: + - pkg:pypi/sortedcontainers?source=hash-mapping + size: 28657 + timestamp: 1738440459037 +- conda: https://conda.anaconda.org/conda-forge/noarch/sparse-0.17.0-pyhcf101f3_0.conda + sha256: 
8406de1065e1d4ba206d611dae9a03de7f226f486ce9fb02ab0f29c3bd031a6a + md5: 1b59de14a7e5888f939611e1fe329e00 + depends: + - python >=3.10 + - numpy >=1.17 + - numba >=0.49 + - python + license: BSD-3-Clause + license_family: BSD + purls: + - pkg:pypi/sparse?source=hash-mapping + size: 121488 + timestamp: 1747799051402 +- conda: https://conda.anaconda.org/conda-forge/noarch/sphinx-8.1.3-pyhd8ed1ab_1.conda + sha256: 3228eb332ce159f031d4b7d2e08117df973b0ba3ddcb8f5dbb7f429f71d27ea1 + md5: 1a3281a0dc355c02b5506d87db2d78ac + depends: + - alabaster >=0.7.14 + - babel >=2.13 + - colorama >=0.4.6 + - docutils >=0.20,<0.22 + - imagesize >=1.3 + - jinja2 >=3.1 + - packaging >=23.0 + - pygments >=2.17 + - python >=3.10 + - requests >=2.30.0 + - snowballstemmer >=2.2 + - sphinxcontrib-applehelp >=1.0.7 + - sphinxcontrib-devhelp >=1.0.6 + - sphinxcontrib-htmlhelp >=2.0.6 + - sphinxcontrib-jsmath >=1.0.1 + - sphinxcontrib-qthelp >=1.0.6 + - sphinxcontrib-serializinghtml >=1.1.9 + - tomli >=2.0 + license: BSD-2-Clause + license_family: BSD + purls: + - pkg:pypi/sphinx?source=hash-mapping + size: 1387076 + timestamp: 1733754175386 +- conda: https://conda.anaconda.org/conda-forge/noarch/sphinx-copybutton-0.5.2-pyhd8ed1ab_1.conda + sha256: 8cd892e49cb4d00501bc4439fb0c73ca44905f01a65b2b7fa05ba0e8f3924f19 + md5: bf22cb9c439572760316ce0748af3713 + depends: + - python >=3.9 + - sphinx >=1.8 + license: MIT + license_family: MIT + purls: + - pkg:pypi/sphinx-copybutton?source=hash-mapping + size: 17893 + timestamp: 1734573117732 +- conda: https://conda.anaconda.org/conda-forge/noarch/sphinx_rtd_theme-3.1.0-pyha770c72_0.conda + sha256: 1d57a0cd74ecc0e5dc006f6591145d1abb6658464919d4aeb163d3db714f80e6 + md5: cede6bc99a0253fa676f03cfdc666d57 + depends: + - docutils >0.18,<0.22 + - python >=3.8 + - sphinx >=6,<9 + - sphinxcontrib-jquery >=4,<5 + license: MIT + license_family: MIT + purls: + - pkg:pypi/sphinx-rtd-theme?source=hash-mapping + size: 4626882 + timestamp: 1769194859566 +- conda: 
https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-applehelp-2.0.0-pyhd8ed1ab_1.conda + sha256: d7433a344a9ad32a680b881c81b0034bc61618d12c39dd6e3309abeffa9577ba + md5: 16e3f039c0aa6446513e94ab18a8784b + depends: + - python >=3.9 + - sphinx >=5 + license: BSD-2-Clause + license_family: BSD + purls: + - pkg:pypi/sphinxcontrib-applehelp?source=hash-mapping + size: 29752 + timestamp: 1733754216334 +- conda: https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-devhelp-2.0.0-pyhd8ed1ab_1.conda + sha256: 55d5076005d20b84b20bee7844e686b7e60eb9f683af04492e598a622b12d53d + md5: 910f28a05c178feba832f842155cbfff + depends: + - python >=3.9 + - sphinx >=5 + license: BSD-2-Clause + license_family: BSD + purls: + - pkg:pypi/sphinxcontrib-devhelp?source=hash-mapping + size: 24536 + timestamp: 1733754232002 +- conda: https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-htmlhelp-2.1.0-pyhd8ed1ab_1.conda + sha256: c1492c0262ccf16694bdcd3bb62aa4627878ea8782d5cd3876614ffeb62b3996 + md5: e9fb3fe8a5b758b4aff187d434f94f03 + depends: + - python >=3.9 + - sphinx >=5 + license: BSD-2-Clause + license_family: BSD + purls: + - pkg:pypi/sphinxcontrib-htmlhelp?source=hash-mapping + size: 32895 + timestamp: 1733754385092 +- conda: https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-jquery-4.1-pyhd8ed1ab_1.conda + sha256: 69c08d18663b57ebc8e4187c64c8d29b10996bb465a515cd288d87b6f2f52a5e + md5: 403185829255321ea427333f7773dd1f + depends: + - python >=3.9 + - sphinx >=1.8 + license: 0BSD AND MIT + purls: + - pkg:pypi/sphinxcontrib-jquery?source=hash-mapping + size: 112964 + timestamp: 1734344603903 +- conda: https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-jsmath-1.0.1-pyhd8ed1ab_1.conda + sha256: 578bef5ec630e5b2b8810d898bbbf79b9ae66d49b7938bcc3efc364e679f2a62 + md5: fa839b5ff59e192f411ccc7dae6588bb + depends: + - python >=3.9 + license: BSD-2-Clause + license_family: BSD + purls: + - pkg:pypi/sphinxcontrib-jsmath?source=hash-mapping + size: 10462 + 
timestamp: 1733753857224 +- conda: https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-qthelp-2.0.0-pyhd8ed1ab_1.conda + sha256: c664fefae4acdb5fae973bdde25836faf451f41d04342b64a358f9a7753c92ca + md5: 00534ebcc0375929b45c3039b5ba7636 + depends: + - python >=3.9 + - sphinx >=5 + license: BSD-2-Clause + license_family: BSD + purls: + - pkg:pypi/sphinxcontrib-qthelp?source=hash-mapping + size: 26959 + timestamp: 1733753505008 +- conda: https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-serializinghtml-1.1.10-pyhd8ed1ab_1.conda + sha256: 64d89ecc0264347486971a94487cb8d7c65bfc0176750cf7502b8a272f4ab557 + md5: 3bc61f7161d28137797e038263c04c54 + depends: + - python >=3.9 + - sphinx >=5 + license: BSD-2-Clause + license_family: BSD + purls: + - pkg:pypi/sphinxcontrib-serializinghtml?source=hash-mapping + size: 28669 + timestamp: 1733750596111 +- conda: https://conda.anaconda.org/conda-forge/linux-64/sqlalchemy-2.0.49-py310h139afa4_0.conda + sha256: 0d3d70074ac65e1ffd8207c8da86b22cf458a15f92b2cd377967072e52d90673 + md5: 8db8818f733deca2d5f7041fe4819ee0 + depends: + - python + - greenlet !=0.4.17 + - typing-extensions >=4.6.0 + - __glibc >=2.17,<3.0.a0 + - libgcc >=14 + - python_abi 3.10.* *_cp310 + license: MIT + license_family: MIT + purls: + - pkg:pypi/sqlalchemy?source=hash-mapping + size: 2993753 + timestamp: 1775241332871 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/sqlalchemy-2.0.49-py310haea493c_0.conda + sha256: 5e880a409e43bf1b1597c802a20e8fc6fe7d4ccbb768dabe4f51e3462e78e446 + md5: eb28dd97ca20935412753338dc6bfec7 + depends: + - python + - greenlet !=0.4.17 + - typing-extensions >=4.6.0 + - __osx >=11.0 + - python 3.10.* *_cpython + - python_abi 3.10.* *_cp310 + license: MIT + license_family: MIT + purls: + - pkg:pypi/sqlalchemy?source=hash-mapping + size: 2982964 + timestamp: 1775241454532 +- conda: https://conda.anaconda.org/conda-forge/noarch/stack_data-0.6.3-pyhd8ed1ab_1.conda + sha256: 
570da295d421661af487f1595045760526964f41471021056e993e73089e9c41 + md5: b1b505328da7a6b246787df4b5a49fbc + depends: + - asttokens + - executing + - pure_eval + - python >=3.9 + license: MIT + license_family: MIT + purls: + - pkg:pypi/stack-data?source=hash-mapping + size: 26988 + timestamp: 1733569565672 +- conda: https://conda.anaconda.org/conda-forge/noarch/stamina-26.1.0-pyhcf101f3_1.conda + sha256: e85ebb8bc145d925f00d92fda9b9578b7a3fec17a80fdddf416c3ac75f8ac55d + md5: c06c071acce8103b854d9434a249bd80 + depends: + - python >=3.10 + - tenacity + - python + license: MIT + purls: + - pkg:pypi/stamina?source=hash-mapping + size: 24175 + timestamp: 1776172479398 +- conda: https://conda.anaconda.org/conda-forge/noarch/starlette-1.0.0-pyhcf101f3_0.conda + sha256: 1a1dc376e95491f4b2003f4428e6f7caf4a3de0ef9869248b29dcc9704c34b39 + md5: 8fbd5b6879350f1b5303c1a652d4b781 + depends: + - anyio >=3.6.2,<5 + - python >=3.10 + - typing_extensions >=4.10.0 + - python + license: BSD-3-Clause + license_family: BSD + purls: + - pkg:pypi/starlette?source=compressed-mapping + size: 63717 + timestamp: 1774215956101 +- conda: https://conda.anaconda.org/conda-forge/noarch/suitcase-mongo-0.7.0-pyhd8ed1ab_0.conda + sha256: 26b42fb653ccb74243d4e1e73950edf2cfc1c79b2f6720797cf17b72d617c36f + md5: 30068d1e506e0c54b9954d44dfcfb1bf + depends: + - event-model >=1.8.0 + - packaging + - pymongo + - python >=3.9 + license: BSD-3-Clause + license_family: BSD + purls: + - pkg:pypi/suitcase-mongo?source=hash-mapping + size: 26416 + timestamp: 1737651184394 +- conda: https://conda.anaconda.org/conda-forge/linux-64/svt-av1-4.0.1-hecca717_0.conda + sha256: 4a1d2005153b9454fc21c9bad1b539df189905be49e851ec62a6212c2e045381 + md5: 2a2170a3e5c9a354d09e4be718c43235 + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=14 + - libstdcxx >=14 + license: BSD-2-Clause + license_family: BSD + purls: [] + size: 2619743 + timestamp: 1769664536467 +- conda: 
https://conda.anaconda.org/conda-forge/osx-arm64/svt-av1-4.0.1-h0cb729a_0.conda + sha256: bdef3c1c4d2a396ad4f7dc64c5e9a02d4c5a21ff93ed07a33e49574de5d2d18d + md5: 8badc3bf16b62272aa2458f138223821 + depends: + - __osx >=11.0 + - libcxx >=19 + license: BSD-2-Clause + license_family: BSD + purls: [] + size: 1456245 + timestamp: 1769664727051 +- conda: https://conda.anaconda.org/conda-forge/noarch/tblib-3.2.2-pyhcf101f3_0.conda + sha256: 6b549360f687ee4d11bf85a6d6a276a30f9333df1857adb0fe785f0f8e9bcd60 + md5: f88bb644823094f436792f80fba3207e + depends: + - python >=3.10 + - python + license: BSD-2-Clause + license_family: BSD + purls: + - pkg:pypi/tblib?source=hash-mapping + size: 19397 + timestamp: 1762956379123 +- conda: https://conda.anaconda.org/conda-forge/noarch/tenacity-9.1.4-pyhcf101f3_0.conda + sha256: 32e75900d6a094ffe4290a8c9f1fa15744d9da8ff617aba4acaa0f057a065c34 + md5: 043f0599dc8aa023369deacdb5ac24eb + depends: + - python >=3.10 + - python + license: Apache-2.0 + license_family: APACHE + purls: + - pkg:pypi/tenacity?source=hash-mapping + size: 31404 + timestamp: 1770510172846 +- conda: https://conda.anaconda.org/conda-forge/noarch/tifffile-2025.5.10-pyhd8ed1ab_0.conda + sha256: 3ea3854eb8a41bbb128598a5d5bc9aed52446d20d2f1bd6e997c2387074202e4 + md5: 1fdb801f28bf4987294c49aaa314bf5e + depends: + - imagecodecs >=2024.12.30 + - numpy >=1.19.2 + - python >=3.10 + constrains: + - matplotlib-base >=3.3 + license: BSD-3-Clause + license_family: BSD + purls: + - pkg:pypi/tifffile?source=hash-mapping + size: 179592 + timestamp: 1746986641678 +- conda: https://conda.anaconda.org/conda-forge/noarch/tiled-0.1.0a117-hd8ed1ab_0.conda + noarch: python + sha256: 6ac401d6e67170e93a852a73f1ebc5451b8a897b7b3e65475bacab2dfd618c1d + md5: 307b7f602a906fadb10df9c9993b174c + depends: + - tiled-client 0.1.0a117 hd8ed1ab_0 + - tiled-formats 0.1.0a117 hd8ed1ab_0 + - tiled-server 0.1.0a117 hd8ed1ab_0 + license: BSD-3-Clause + license_family: BSD + purls: [] + size: 8127 + timestamp: 
1713243284710 +- conda: https://conda.anaconda.org/conda-forge/noarch/tiled-0.2.1-pyhd8ed1ab_0.conda + sha256: 3aa07660d71fde13664c60f81170326e697c100ab016f646be10256163e919bc + md5: 9e598dfd3558944243d2ddf80deb5297 + depends: + - python >=3.10 + - tiled-client 0.2.1 pyhd8ed1ab_0 + - tiled-formats 0.2.1 pyhd8ed1ab_0 + - tiled-server 0.2.1 pyhd8ed1ab_0 + license: BSD-3-Clause + license_family: BSD + purls: [] + size: 8845 + timestamp: 1762996791496 +- conda: https://conda.anaconda.org/conda-forge/noarch/tiled-base-0.1.0a117-pyhd8ed1ab_0.conda + sha256: 52e3d5d20877780f63815b211a7c5cb061175dcf803ac1299980ad78c65a0211 + md5: 4bfd18dec32a903a921ab0a460c128ea + depends: + - appdirs + - blosc + - click !=8.1.0 + - dask + - httpx >=0.20.0 + - jsonschema + - lz4 + - msgpack-python >=1.0.0 + - ndindex + - numpy + - pandas + - pyarrow + - python >=3.8 + - pyyaml + - sparse >=0.13.0 + - typer + - xarray + - zstandard + license: BSD-3-Clause + license_family: BSD + purls: + - pkg:pypi/tiled?source=hash-mapping + size: 1343494 + timestamp: 1713243271592 +- conda: https://conda.anaconda.org/conda-forge/noarch/tiled-base-0.2.1-pyhd8ed1ab_0.conda + sha256: 2b1e7170f17bf0b51d0945778d87be827afbbb7b38c4b75950da93833aad6e6c + md5: b26dd9dd95985e01ac661b6d324c9729 + depends: + - appdirs + - awkward + - click !=8.1.0 + - dask + - httpx >=0.20.0 + - json-merge-patch + - jsonpatch + - jsonschema + - lz4 + - msgpack-python >=1.0.0 + - ndindex + - numpy + - orjson + - pandas + - pyarrow + - pydantic-settings >=2,<3 + - python >=3.10 + - python-blosc2 + - pyyaml + - sparse >=0.13.0 + - typer + - xarray + - zstandard + license: BSD-3-Clause + license_family: BSD + purls: + - pkg:pypi/tiled?source=hash-mapping + size: 1501599 + timestamp: 1762996759482 +- conda: https://conda.anaconda.org/conda-forge/noarch/tiled-client-0.1.0a117-hd8ed1ab_0.conda + noarch: python + sha256: a0584e5f096c516b1c64e5afd72d0935c38fd7e06ed377b059c6ad21b526e407 + md5: 888aeac309f0e238f57626654813089a + depends: + - 
entrypoints + - heapdict + - starlette + - tiled-base 0.1.0a117 pyhd8ed1ab_0 + license: BSD-3-Clause + license_family: BSD + purls: [] + size: 7733 + timestamp: 1713243277881 +- conda: https://conda.anaconda.org/conda-forge/noarch/tiled-client-0.2.1-pyhd8ed1ab_0.conda + sha256: da230268892468389beeca262f99249acd287472932219b7a607396f11a92952 + md5: 6d15d1e576b599b6d22456c6c1b82bd1 + depends: + - entrypoints + - platformdirs + - pydantic + - python >=3.10 + - rich + - stamina + - tiled-base 0.2.1 pyhd8ed1ab_0 + - websockets + license: BSD-3-Clause + license_family: BSD + purls: [] + size: 8393 + timestamp: 1762996768605 +- conda: https://conda.anaconda.org/conda-forge/noarch/tiled-formats-0.1.0a117-hd8ed1ab_0.conda + noarch: python + sha256: c43ee39a1e02c6d9b767066076997c74f5a008d17e774d7137ba3ebd0ae3cf11 + md5: c14dd9b0e24fb2427eee5e66f0185903 + depends: + - h5netcdf + - h5py + - openpyxl + - pillow + - tifffile + - tiled-base 0.1.0a117 pyhd8ed1ab_0 + license: BSD-3-Clause + license_family: BSD + purls: [] + size: 7472 + timestamp: 1713243280238 +- conda: https://conda.anaconda.org/conda-forge/noarch/tiled-formats-0.2.1-pyhd8ed1ab_0.conda + sha256: 97df9b15cc32dfd441309fc0bbdb7b038eb283eee336037bfff9a812284ebf42 + md5: 8aa98065c5bfe384ac81f2ca3da0bccc + depends: + - h5netcdf + - h5py + - hdf5plugin + - openpyxl + - pillow + - python >=3.10 + - tifffile + - tiled-base 0.2.1 pyhd8ed1ab_0 + license: BSD-3-Clause + license_family: BSD + purls: [] + size: 8233 + timestamp: 1762996776232 +- conda: https://conda.anaconda.org/conda-forge/noarch/tiled-server-0.1.0a117-hd8ed1ab_0.conda + noarch: python + sha256: 38800aae11d68d0008d966020944c6d85081c73091aaab84cecd091e330a2cad + md5: 80b7ac8be7c0a77d0f26f131be05dd06 + depends: + - aiofiles + - alembic + - anyio + - asgi-correlation-id + - cachetools + - cachey + - dask + - fastapi + - jinja2 + - jmespath + - openpyxl + - orjson + - packaging + - pandas + - prometheus_client + - psutil + - pydantic >=2,<3 + - pydantic-settings 
>=2,<3 + - python-dateutil + - python-jose + - python-multipart + - sqlalchemy + - starlette + - tiled-base 0.1.0a117 pyhd8ed1ab_0 + - toolz + - uvicorn + - watchgod + license: BSD-3-Clause + license_family: BSD + purls: [] + size: 7905 + timestamp: 1713243282503 +- conda: https://conda.anaconda.org/conda-forge/noarch/tiled-server-0.2.1-pyhd8ed1ab_0.conda + sha256: 81e36c420f6e729e4749a02f0f56aa902632156f57f90c5db518724d72280b16 + md5: a2d3df9d27ee4c133cef437e64ca4b7f + depends: + - adbc-driver-manager + - adbc-driver-postgresql + - adbc-driver-sqlite + - aiofiles + - aiosqlite + - alembic + - anyio + - asgi-correlation-id + - asyncpg + - cachetools + - canonicaljson + - fastapi + - jinja2 + - jmespath + - minio + - obstore + - openpyxl + - packaging + - prometheus_client + - pydantic >=2,<3 + - python >=3.10 + - python-dateutil + - python-duckdb <1.4.0 + - python-jose + - python-multipart + - redis-py + - sqlalchemy + - stamina + - starlette >=0.48.0 + - tiled-base 0.2.1 pyhd8ed1ab_0 + - toolz + - uvicorn + - watchfiles + - zarr + license: BSD-3-Clause + license_family: BSD + purls: [] + size: 8689 + timestamp: 1762996783887 +- conda: https://conda.anaconda.org/conda-forge/linux-64/tk-8.6.13-noxft_h366c992_103.conda + sha256: cafeec44494f842ffeca27e9c8b0c27ed714f93ac77ddadc6aaf726b5554ebac + md5: cffd3bdd58090148f4cfcd831f4b26ab + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=14 + - libzlib >=1.3.1,<2.0a0 + constrains: + - xorg-libx11 >=1.8.12,<2.0a0 + license: TCL + license_family: BSD + purls: [] + size: 3301196 + timestamp: 1769460227866 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/tk-8.6.13-h010d191_3.conda + sha256: 799cab4b6cde62f91f750149995d149bc9db525ec12595e8a1d91b9317f038b3 + md5: a9d86bc62f39b94c4661716624eb21b0 + depends: + - __osx >=11.0 + - libzlib >=1.3.1,<2.0a0 + license: TCL + license_family: BSD + purls: [] + size: 3127137 + timestamp: 1769460817696 +- conda: 
https://conda.anaconda.org/conda-forge/noarch/tomli-2.4.1-pyhcf101f3_0.conda + sha256: 91cafdb64268e43e0e10d30bd1bef5af392e69f00edd34dfaf909f69ab2da6bd + md5: b5325cf06a000c5b14970462ff5e4d58 + depends: + - python >=3.10 + - python + license: MIT + license_family: MIT + purls: + - pkg:pypi/tomli?source=compressed-mapping + size: 21561 + timestamp: 1774492402955 +- conda: https://conda.anaconda.org/conda-forge/noarch/toolz-1.1.0-pyhd8ed1ab_1.conda + sha256: 4e379e1c18befb134247f56021fdf18e112fb35e64dd1691858b0a0f3bea9a45 + md5: c07a6153f8306e45794774cf9b13bd32 + depends: + - python >=3.10 + license: BSD-3-Clause + license_family: BSD + purls: + - pkg:pypi/toolz?source=hash-mapping + size: 53978 + timestamp: 1760707830681 +- conda: https://conda.anaconda.org/conda-forge/linux-64/tornado-6.5.5-py310h7c4b9e2_0.conda + sha256: ffe36f9b042eeb8b24f18d769ce7fac76279e8593c9a3bc22f6e43303762b75f + md5: 1a1943b805cbe2538f772a462214d874 + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=14 + - python >=3.10,<3.11.0a0 + - python_abi 3.10.* *_cp310 + license: Apache-2.0 + license_family: Apache + purls: + - pkg:pypi/tornado?source=hash-mapping + size: 668054 + timestamp: 1774358033371 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/tornado-6.5.5-py310h72544b6_0.conda + sha256: 791f99a01c823098a29383cae7238cc540079098934bd34bc57adf3c64515c23 + md5: a20e87b9fe64b86f9ed414c887c9669c + depends: + - __osx >=11.0 + - python >=3.10,<3.11.0a0 + - python >=3.10,<3.11.0a0 *_cpython + - python_abi 3.10.* *_cp310 + license: Apache-2.0 + license_family: Apache + purls: + - pkg:pypi/tornado?source=compressed-mapping + size: 666858 + timestamp: 1774358813446 +- conda: https://conda.anaconda.org/conda-forge/noarch/tqdm-4.67.3-pyh8f84b5b_0.conda + sha256: 9ef8e47cf00e4d6dcc114eb32a1504cc18206300572ef14d76634ba29dfe1eb6 + md5: e5ce43272193b38c2e9037446c1d9206 + depends: + - python >=3.10 + - __unix + - python + license: MPL-2.0 and MIT + purls: + - 
pkg:pypi/tqdm?source=compressed-mapping + size: 94132 + timestamp: 1770153424136 +- conda: https://conda.anaconda.org/conda-forge/noarch/traitlets-5.14.3-pyhd8ed1ab_1.conda + sha256: f39a5620c6e8e9e98357507262a7869de2ae8cc07da8b7f84e517c9fd6c2b959 + md5: 019a7385be9af33791c989871317e1ed + depends: + - python >=3.9 + license: BSD-3-Clause + license_family: BSD + purls: + - pkg:pypi/traitlets?source=hash-mapping + size: 110051 + timestamp: 1733367480074 +- conda: https://conda.anaconda.org/conda-forge/noarch/twine-6.2.0-pyhcf101f3_0.conda + sha256: 0370098cab22773e33755026bf78539c2f05645fce7dcc9713d01e21950756bb + md5: 901a86453fa6183e914b937643619a03 + depends: + - id + - importlib-metadata >=3.6 + - keyring >=21.2.0 + - packaging >=24.0 + - python >=3.10 + - readme_renderer >=35.0 + - requests >=2.20 + - requests-toolbelt >=0.8.0,!=0.9.0 + - rfc3986 >=1.4.0 + - rich >=12.0.0 + - urllib3 >=1.26.0 + - python + license: Apache-2.0 + license_family: APACHE + purls: + - pkg:pypi/twine?source=hash-mapping + size: 42488 + timestamp: 1757013705407 +- conda: https://conda.anaconda.org/conda-forge/noarch/typer-0.24.1-pyhcf101f3_0.conda + sha256: 859aec3457a4d6dd6e4a68d9f4ad4216ce05e1a1a94d244f10629848de77739b + md5: 0bb9dfbe0806165f4960331a0ac05ab5 + depends: + - annotated-doc >=0.0.2 + - click >=8.2.1 + - python >=3.10 + - rich >=12.3.0 + - shellingham >=1.3.0 + - python + license: MIT + license_family: MIT + purls: + - pkg:pypi/typer?source=compressed-mapping + size: 116134 + timestamp: 1775138098187 +- conda: https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.15.0-h396c80c_0.conda + sha256: 7c2df5721c742c2a47b2c8f960e718c930031663ac1174da67c1ed5999f7938c + md5: edd329d7d3a4ab45dcf905899a7a6115 + depends: + - typing_extensions ==4.15.0 pyhcf101f3_0 + license: PSF-2.0 + license_family: PSF + purls: [] + size: 91383 + timestamp: 1756220668932 +- conda: https://conda.anaconda.org/conda-forge/noarch/typing-inspection-0.4.2-pyhd8ed1ab_1.conda + sha256: 
70db27de58a97aeb7ba7448366c9853f91b21137492e0b4430251a1870aa8ff4 + md5: a0a4a3035667fc34f29bfbd5c190baa6 + depends: + - python >=3.10 + - typing_extensions >=4.12.0 + license: MIT + license_family: MIT + purls: + - pkg:pypi/typing-inspection?source=compressed-mapping + size: 18923 + timestamp: 1764158430324 +- conda: https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.15.0-pyhcf101f3_0.conda + sha256: 032271135bca55aeb156cee361c81350c6f3fb203f57d024d7e5a1fc9ef18731 + md5: 0caa1af407ecff61170c9437a808404d + depends: + - python >=3.10 + - python + license: PSF-2.0 + license_family: PSF + purls: + - pkg:pypi/typing-extensions?source=hash-mapping + size: 51692 + timestamp: 1756220668932 +- conda: https://conda.anaconda.org/conda-forge/noarch/tzdata-2025c-hc9c84f9_1.conda + sha256: 1d30098909076af33a35017eed6f2953af1c769e273a0626a04722ac4acaba3c + md5: ad659d0a2b3e47e38d829aa8cad2d610 + license: LicenseRef-Public-Domain + purls: [] + size: 119135 + timestamp: 1767016325805 +- conda: https://conda.anaconda.org/conda-forge/noarch/tzlocal-5.3.1-pyh8f84b5b_0.conda + sha256: 6447388bd870ab0a2b38af5aa64185cd71028a2a702f0935e636a01d81fba7fc + md5: 369f3170d6f727d3102d83274e403b66 + depends: + - python >=3.10 + - __unix + - python + license: MIT + license_family: MIT + purls: + - pkg:pypi/tzlocal?source=hash-mapping + size: 23880 + timestamp: 1756227235167 +- conda: https://conda.anaconda.org/conda-forge/linux-64/ukkonen-1.1.0-py310h03d9f68_0.conda + sha256: 24a1140fa1dcaf2d2b7da1014eba5801eae8c4c025bb17845a3b1b6c487cf8f7 + md5: c3b1f5bc28ae6282ba95156d18fde825 + depends: + - __glibc >=2.17,<3.0.a0 + - cffi + - libgcc >=14 + - libstdcxx >=14 + - python >=3.10,<3.11.0a0 + - python_abi 3.10.* *_cp310 + license: MIT + license_family: MIT + purls: + - pkg:pypi/ukkonen?source=hash-mapping + size: 14822 + timestamp: 1769438718477 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/ukkonen-1.1.0-py310h1c35771_0.conda + sha256: 
a7b184d5f9ce0de4ca3a06517e2e2fa5a98a74a98b4c505c1df823b86829eec6 + md5: 2e3d1f8ab9f89d5fe0fb190560e74758 + depends: + - __osx >=11.0 + - cffi + - libcxx >=19 + - python >=3.10,<3.11.0a0 + - python >=3.10,<3.11.0a0 *_cpython + - python_abi 3.10.* *_cp310 + license: MIT + license_family: MIT + purls: + - pkg:pypi/ukkonen?source=hash-mapping + size: 14720 + timestamp: 1769439408979 +- conda: https://conda.anaconda.org/conda-forge/noarch/uncertainties-3.2.3-pyhd8ed1ab_0.conda + sha256: 6bee1d370931b1ef4105635c66fa9e2350c1d180e22de0ba031810752a20762b + md5: 0ef430c64b59f8e67b0f668e26df2d00 + depends: + - future + - numpy + - python >=3.9 + license: BSD-3-Clause + license_family: BSD + purls: + - pkg:pypi/uncertainties?source=hash-mapping + size: 56653 + timestamp: 1745274434534 +- conda: https://conda.anaconda.org/conda-forge/linux-64/unicodedata2-17.0.1-py310h7c4b9e2_0.conda + sha256: 44ecba51c98c3fb2ce3d00295d423d3bb254cde1790eff9818ed328aa608ab28 + md5: 234e9858dd691d3f597147e22cbf16cf + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=14 + - python >=3.10,<3.11.0a0 + - python_abi 3.10.* *_cp310 + license: Apache-2.0 + license_family: Apache + purls: + - pkg:pypi/unicodedata2?source=hash-mapping + size: 410408 + timestamp: 1770909105501 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/unicodedata2-17.0.1-py310h72544b6_0.conda + sha256: 48e56c2068cce4b2d09cceca65ca1c877ebf550e3ad67af3ed30db162bc89e0b + md5: a35cf23aa03fb8d3a95158253918ed00 + depends: + - __osx >=11.0 + - python >=3.10,<3.11.0a0 + - python >=3.10,<3.11.0a0 *_cpython + - python_abi 3.10.* *_cp310 + license: Apache-2.0 + license_family: Apache + purls: + - pkg:pypi/unicodedata2?source=hash-mapping + size: 416056 + timestamp: 1770910020955 +- conda: https://conda.anaconda.org/conda-forge/noarch/urllib3-2.5.0-pyhd8ed1ab_0.conda + sha256: 4fb9789154bd666ca74e428d973df81087a697dbb987775bc3198d2215f240f8 + md5: 436c165519e140cb08d246a4472a9d6a + depends: + - brotli-python >=1.0.9 + - h2 >=4,<5 + - 
pysocks >=1.5.6,<2.0,!=1.5.7 + - python >=3.9 + - zstandard >=0.18.0 + license: MIT + license_family: MIT + purls: + - pkg:pypi/urllib3?source=hash-mapping + size: 101735 + timestamp: 1750271478254 +- conda: https://conda.anaconda.org/conda-forge/noarch/uvicorn-0.44.0-pyhc90fa1f_0.conda + sha256: a1db6280c2bee294e625bd3026f0b0792e8f21454d105baa530a28effd8d8d09 + md5: 83d36e00ae3614c8c3bb0e55e24c7f50 + depends: + - __unix + - click >=7.0 + - h11 >=0.8 + - python >=3.10 + - typing_extensions >=4.0 + - python + license: BSD-3-Clause + license_family: BSD + purls: + - pkg:pypi/uvicorn?source=compressed-mapping + size: 55522 + timestamp: 1775475773715 +- conda: https://conda.anaconda.org/conda-forge/noarch/uvicorn-standard-0.44.0-h4457471_0.conda + sha256: dd5c71e2539b9f05e629ff275185d1a0022c998aaf75d6c019919b01e4adb57b + md5: ab23bf4012a60c25855e0bb4c0145af0 + depends: + - __unix + - uvicorn ==0.44.0 pyhc90fa1f_0 + - websockets >=10.4 + - httptools >=0.6.3 + - watchfiles >=0.20 + - python-dotenv >=0.13 + - pyyaml >=5.1 + - uvloop >=0.15.1 + license: BSD-3-Clause + license_family: BSD + purls: [] + size: 4145 + timestamp: 1775475773715 +- conda: https://conda.anaconda.org/conda-forge/linux-64/uvloop-0.22.1-py310h7c4b9e2_1.conda + sha256: f215816ce787227c19be8c512598b75edf627c04de2e6281183af6b2d0b9b5e1 + md5: cfba069d1af8b66f4fec88d23c98c363 + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=14 + - libuv >=1.51.0,<2.0a0 + - python >=3.10,<3.11.0a0 + - python_abi 3.10.* *_cp310 + license: MIT OR Apache-2.0 + purls: + - pkg:pypi/uvloop?source=hash-mapping + size: 575950 + timestamp: 1762472820451 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/uvloop-0.22.1-py310hfe3a0ae_1.conda + sha256: 4ba362d78555dbf3add5ef900b84f7ab34d14289c68dbb5715357594070de343 + md5: c63a073be5ece028f6e410fb95e96a9d + depends: + - __osx >=11.0 + - libuv >=1.51.0,<2.0a0 + - python >=3.10,<3.11.0a0 + - python >=3.10,<3.11.0a0 *_cpython + - python_abi 3.10.* *_cp310 + license: MIT OR 
Apache-2.0 + purls: + - pkg:pypi/uvloop?source=hash-mapping + size: 478239 + timestamp: 1762473282528 +- conda: https://conda.anaconda.org/conda-forge/noarch/virtualenv-21.2.3-pyhcf101f3_0.conda + sha256: 58dd18a33928b5dcbeb43ec5286a572df5ba4ddfd0805d07db10da4177cff5ef + md5: 842144d66e29a809d4c0cba435b8522c + depends: + - python >=3.10 + - distlib >=0.3.7,<1 + - filelock <4,>=3.24.2 + - importlib-metadata >=6.6 + - platformdirs >=3.9.1,<5 + - python-discovery >=1 + - typing_extensions >=4.13.2 + - python + license: MIT + purls: + - pkg:pypi/virtualenv?source=hash-mapping + size: 4658736 + timestamp: 1776162998994 +- conda: https://conda.anaconda.org/conda-forge/linux-64/watchfiles-1.1.1-py310hdfeec95_0.conda + sha256: 86c7d114a66259183ce8f915cf4aee82b15b34b500cfb9dc34e51726848a2439 + md5: 22469e330a3cea75dcc4e2ae46f257bb + depends: + - __glibc >=2.17,<3.0.a0 + - anyio >=3.0.0 + - libgcc >=14 + - python >=3.10,<3.11.0a0 + - python_abi 3.10.* *_cp310 + constrains: + - __glibc >=2.17 + license: MIT + license_family: MIT + purls: + - pkg:pypi/watchfiles?source=hash-mapping + size: 413743 + timestamp: 1760456807594 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/watchfiles-1.1.1-py310h53169e7_0.conda + sha256: 0b321ab2f46d66dc611537588afa251683a624096c78f885dbeae6867717b11c + md5: 25ca0949e01c3c7153e60ad81db4032d + depends: + - __osx >=11.0 + - anyio >=3.0.0 + - python >=3.10,<3.11.0a0 + - python >=3.10,<3.11.0a0 *_cpython + - python_abi 3.10.* *_cp310 + constrains: + - __osx >=11.0 + license: MIT + license_family: MIT + purls: + - pkg:pypi/watchfiles?source=hash-mapping + size: 360639 + timestamp: 1760457446314 +- conda: https://conda.anaconda.org/conda-forge/noarch/watchgod-0.7-pyhd8ed1ab_0.tar.bz2 + sha256: 317e825e463c4e8057e49432d5c2ae3d8371419645af94aeb618190d86ceadb8 + md5: 579cdbf560b3d891212227eb1576948e + depends: + - python >=3.5 + license: MIT + license_family: MIT + purls: + - pkg:pypi/watchgod?source=hash-mapping + size: 14187 + timestamp: 
1612037983750 +- conda: https://conda.anaconda.org/conda-forge/linux-64/wayland-1.25.0-hd6090a7_0.conda + sha256: ea374d57a8fcda281a0a89af0ee49a2c2e99cc4ac97cf2e2db7064e74e764bdb + md5: 996583ea9c796e5b915f7d7580b51ea6 + depends: + - __glibc >=2.17,<3.0.a0 + - libexpat >=2.7.4,<3.0a0 + - libffi >=3.5.2,<3.6.0a0 + - libgcc >=14 + - libstdcxx >=14 + license: MIT + license_family: MIT + purls: [] + size: 334139 + timestamp: 1773959575393 +- conda: https://conda.anaconda.org/conda-forge/noarch/wcwidth-0.6.0-pyhd8ed1ab_0.conda + sha256: e298b508b2473c4227206800dfb14c39e4b14fd79d4636132e9e1e4244cdf4aa + md5: c3197f8c0d5b955c904616b716aca093 + depends: + - python >=3.10 + license: MIT + license_family: MIT + purls: + - pkg:pypi/wcwidth?source=compressed-mapping + size: 71550 + timestamp: 1770634638503 +- conda: https://conda.anaconda.org/conda-forge/linux-64/websockets-16.0-py310h139afa4_1.conda + sha256: 056c95a0a964678f8c1e8d7544768a70ffb7e32a382f04cdf56d26c80866dd84 + md5: 5494bc9ff7adbeadfdf3b946126af290 + depends: + - python + - libgcc >=14 + - __glibc >=2.17,<3.0.a0 + - python_abi 3.10.* *_cp310 + license: BSD-3-Clause + license_family: BSD + purls: + - pkg:pypi/websockets?source=hash-mapping + size: 296372 + timestamp: 1768087389982 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/websockets-16.0-py310haea493c_1.conda + sha256: 1e3787f276bef508a8a898ed583d170e7dd0a0893970860b46e7db9e5dd206f9 + md5: 45bcd83128946849e05ad8b271dfe8d8 + depends: + - python + - __osx >=11.0 + - python 3.10.* *_cpython + - python_abi 3.10.* *_cp310 + license: BSD-3-Clause + license_family: BSD + purls: + - pkg:pypi/websockets?source=hash-mapping + size: 299935 + timestamp: 1768087418866 +- conda: https://conda.anaconda.org/conda-forge/noarch/wheel-0.46.3-pyhd8ed1ab_0.conda + sha256: d6cf2f0ebd5e09120c28ecba450556ce553752652d91795442f0e70f837126ae + md5: bdbd7385b4a67025ac2dba4ef8cb6a8f + depends: + - packaging >=24.0 + - python >=3.10 + license: MIT + license_family: MIT + 
purls: + - pkg:pypi/wheel?source=hash-mapping + size: 31858 + timestamp: 1769139207397 +- conda: https://conda.anaconda.org/conda-forge/noarch/xarray-2025.6.1-pyhd8ed1ab_1.conda + sha256: e27b45ca791cfbcad37d64b8615d0672d94aafa00b014826fcbca2ce18bd1cc0 + md5: 145c6f2ac90174d9ad1a2a51b9d7c1dd + depends: + - numpy >=1.24 + - packaging >=23.2 + - pandas >=2.1 + - python >=3.10 + constrains: + - scipy >=1.11 + - dask-core >=2023.11 + - bottleneck >=1.3 + - zarr >=2.16 + - flox >=0.7 + - h5py >=3.8 + - iris >=3.7 + - cartopy >=0.22 + - numba >=0.57 + - sparse >=0.14 + - pint >=0.22 + - distributed >=2023.11 + - hdf5 >=1.12 + - seaborn-base >=0.13 + - nc-time-axis >=1.4 + - matplotlib-base >=3.8 + - toolz >=0.12 + - netcdf4 >=1.6.0 + - cftime >=1.6 + - h5netcdf >=1.3 + license: Apache-2.0 + license_family: APACHE + purls: + - pkg:pypi/xarray?source=hash-mapping + size: 879913 + timestamp: 1749743321359 +- conda: https://conda.anaconda.org/conda-forge/linux-64/xcb-util-0.4.1-h4f16b4b_2.conda + sha256: ad8cab7e07e2af268449c2ce855cbb51f43f4664936eff679b1f3862e6e4b01d + md5: fdc27cb255a7a2cc73b7919a968b48f0 + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=13 + - libxcb >=1.17.0,<2.0a0 + license: MIT + license_family: MIT + purls: [] + size: 20772 + timestamp: 1750436796633 +- conda: https://conda.anaconda.org/conda-forge/linux-64/xcb-util-cursor-0.1.6-hb03c661_0.conda + sha256: c2be9cae786fdb2df7c2387d2db31b285cf90ab3bfabda8fa75a596c3d20fc67 + md5: 4d1fc190b99912ed557a8236e958c559 + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=14 + - libxcb >=1.13 + - libxcb >=1.17.0,<2.0a0 + - xcb-util-image >=0.4.0,<0.5.0a0 + - xcb-util-renderutil >=0.3.10,<0.4.0a0 + license: MIT + license_family: MIT + purls: [] + size: 20829 + timestamp: 1763366954390 +- conda: https://conda.anaconda.org/conda-forge/linux-64/xcb-util-image-0.4.0-hb711507_2.conda + sha256: 94b12ff8b30260d9de4fd7a28cca12e028e572cbc504fd42aa2646ec4a5bded7 + md5: a0901183f08b6c7107aab109733a3c91 + depends: + - 
libgcc-ng >=12 + - libxcb >=1.16,<2.0.0a0 + - xcb-util >=0.4.1,<0.5.0a0 + license: MIT + license_family: MIT + purls: [] + size: 24551 + timestamp: 1718880534789 +- conda: https://conda.anaconda.org/conda-forge/linux-64/xcb-util-keysyms-0.4.1-hb711507_0.conda + sha256: 546e3ee01e95a4c884b6401284bb22da449a2f4daf508d038fdfa0712fe4cc69 + md5: ad748ccca349aec3e91743e08b5e2b50 + depends: + - libgcc-ng >=12 + - libxcb >=1.16,<2.0.0a0 + license: MIT + license_family: MIT + purls: [] + size: 14314 + timestamp: 1718846569232 +- conda: https://conda.anaconda.org/conda-forge/linux-64/xcb-util-renderutil-0.3.10-hb711507_0.conda + sha256: 2d401dadc43855971ce008344a4b5bd804aca9487d8ebd83328592217daca3df + md5: 0e0cbe0564d03a99afd5fd7b362feecd + depends: + - libgcc-ng >=12 + - libxcb >=1.16,<2.0.0a0 + license: MIT + license_family: MIT + purls: [] + size: 16978 + timestamp: 1718848865819 +- conda: https://conda.anaconda.org/conda-forge/linux-64/xcb-util-wm-0.4.2-hb711507_0.conda + sha256: 31d44f297ad87a1e6510895740325a635dd204556aa7e079194a0034cdd7e66a + md5: 608e0ef8256b81d04456e8d211eee3e8 + depends: + - libgcc-ng >=12 + - libxcb >=1.16,<2.0.0a0 + license: MIT + license_family: MIT + purls: [] + size: 51689 + timestamp: 1718844051451 +- conda: https://conda.anaconda.org/conda-forge/linux-64/xkeyboard-config-2.47-hb03c661_0.conda + sha256: 19c2bb14bec84b0e995b56b752369775c75f1589314b43733948bb5f471a6915 + md5: b56e0c8432b56decafae7e78c5f29ba5 + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=14 + - xorg-libx11 >=1.8.13,<2.0a0 + license: MIT + license_family: MIT + purls: [] + size: 399291 + timestamp: 1772021302485 +- conda: https://conda.anaconda.org/conda-forge/noarch/xlrd-2.0.2-pyhd8ed1ab_0.conda + sha256: 64f09069d8b3a3791643230cedc80d9f9422f667e3e328b40d527375352fe8d4 + md5: 91f5637b706492b9e418da1872fd61ce + depends: + - python >=3.10 + license: BSD-3-Clause AND BSD-4-Clause + license_family: BSD + purls: + - pkg:pypi/xlrd?source=hash-mapping + size: 93671 + timestamp: 
1756170155688 +- conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-libice-1.1.2-hb9d3cd8_0.conda + sha256: c12396aabb21244c212e488bbdc4abcdef0b7404b15761d9329f5a4a39113c4b + md5: fb901ff28063514abb6046c9ec2c4a45 + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=13 + license: MIT + license_family: MIT + purls: [] + size: 58628 + timestamp: 1734227592886 +- conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-libsm-1.2.6-he73a12e_0.conda + sha256: 277841c43a39f738927145930ff963c5ce4c4dacf66637a3d95d802a64173250 + md5: 1c74ff8c35dcadf952a16f752ca5aa49 + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=13 + - libuuid >=2.38.1,<3.0a0 + - xorg-libice >=1.1.2,<2.0a0 + license: MIT + license_family: MIT + purls: [] + size: 27590 + timestamp: 1741896361728 +- conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-libx11-1.8.13-he1eb515_0.conda + sha256: 516d4060139dbb4de49a4dcdc6317a9353fb39ebd47789c14e6fe52de0deee42 + md5: 861fb6ccbc677bb9a9fb2468430b9c6a + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=14 + - libxcb >=1.17.0,<2.0a0 + license: MIT + license_family: MIT + purls: [] + size: 839652 + timestamp: 1770819209719 +- conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxau-1.0.12-hb03c661_1.conda + sha256: 6bc6ab7a90a5d8ac94c7e300cc10beb0500eeba4b99822768ca2f2ef356f731b + md5: b2895afaf55bf96a8c8282a2e47a5de0 + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=14 + license: MIT + license_family: MIT + purls: [] + size: 15321 + timestamp: 1762976464266 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/xorg-libxau-1.0.12-hc919400_1.conda + sha256: adae11db0f66f86156569415ed79cda75b2dbf4bea48d1577831db701438164f + md5: 78b548eed8227a689f93775d5d23ae09 + depends: + - __osx >=11.0 + license: MIT + license_family: MIT + purls: [] + size: 14105 + timestamp: 1762976976084 +- conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxcomposite-0.4.7-hb03c661_0.conda + sha256: 
048c103000af9541c919deef03ae7c5e9c570ffb4024b42ecb58dbde402e373a + md5: f2ba4192d38b6cef2bb2c25029071d90 + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=14 + - xorg-libx11 >=1.8.12,<2.0a0 + - xorg-libxfixes >=6.0.2,<7.0a0 + license: MIT + license_family: MIT + purls: [] + size: 14415 + timestamp: 1770044404696 +- conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxcursor-1.2.3-hb9d3cd8_0.conda + sha256: 832f538ade441b1eee863c8c91af9e69b356cd3e9e1350fff4fe36cc573fc91a + md5: 2ccd714aa2242315acaf0a67faea780b + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=13 + - xorg-libx11 >=1.8.10,<2.0a0 + - xorg-libxfixes >=6.0.1,<7.0a0 + - xorg-libxrender >=0.9.11,<0.10.0a0 + license: MIT + license_family: MIT + purls: [] + size: 32533 + timestamp: 1730908305254 +- conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxdamage-1.1.6-hb9d3cd8_0.conda + sha256: 43b9772fd6582bf401846642c4635c47a9b0e36ca08116b3ec3df36ab96e0ec0 + md5: b5fcc7172d22516e1f965490e65e33a4 + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=13 + - xorg-libx11 >=1.8.10,<2.0a0 + - xorg-libxext >=1.3.6,<2.0a0 + - xorg-libxfixes >=6.0.1,<7.0a0 + license: MIT + license_family: MIT + purls: [] + size: 13217 + timestamp: 1727891438799 +- conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxdmcp-1.1.5-hb03c661_1.conda + sha256: 25d255fb2eef929d21ff660a0c687d38a6d2ccfbcbf0cc6aa738b12af6e9d142 + md5: 1dafce8548e38671bea82e3f5c6ce22f + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=14 + license: MIT + license_family: MIT + purls: [] + size: 20591 + timestamp: 1762976546182 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/xorg-libxdmcp-1.1.5-hc919400_1.conda + sha256: f7fa0de519d8da589995a1fe78ef74556bb8bc4172079ae3a8d20c3c81354906 + md5: 9d1299ace1924aa8f4e0bc8e71dd0cf7 + depends: + - __osx >=11.0 + license: MIT + license_family: MIT + purls: [] + size: 19156 + timestamp: 1762977035194 +- conda: 
https://conda.anaconda.org/conda-forge/linux-64/xorg-libxext-1.3.7-hb03c661_0.conda + sha256: 79c60fc6acfd3d713d6340d3b4e296836a0f8c51602327b32794625826bd052f + md5: 34e54f03dfea3e7a2dcf1453a85f1085 + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=14 + - xorg-libx11 >=1.8.12,<2.0a0 + license: MIT + license_family: MIT + purls: [] + size: 50326 + timestamp: 1769445253162 +- conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxfixes-6.0.2-hb03c661_0.conda + sha256: 83c4c99d60b8784a611351220452a0a85b080668188dce5dfa394b723d7b64f4 + md5: ba231da7fccf9ea1e768caf5c7099b84 + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=14 + - xorg-libx11 >=1.8.12,<2.0a0 + license: MIT + license_family: MIT + purls: [] + size: 20071 + timestamp: 1759282564045 +- conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxi-1.8.2-hb9d3cd8_0.conda + sha256: 1a724b47d98d7880f26da40e45f01728e7638e6ec69f35a3e11f92acd05f9e7a + md5: 17dcc85db3c7886650b8908b183d6876 + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=13 + - xorg-libx11 >=1.8.10,<2.0a0 + - xorg-libxext >=1.3.6,<2.0a0 + - xorg-libxfixes >=6.0.1,<7.0a0 + license: MIT + license_family: MIT + purls: [] + size: 47179 + timestamp: 1727799254088 +- conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxrandr-1.5.5-hb03c661_0.conda + sha256: 80ed047a5cb30632c3dc5804c7716131d767089f65877813d4ae855ee5c9d343 + md5: e192019153591938acf7322b6459d36e + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=14 + - xorg-libx11 >=1.8.12,<2.0a0 + - xorg-libxext >=1.3.6,<2.0a0 + - xorg-libxrender >=0.9.12,<0.10.0a0 + license: MIT + license_family: MIT + purls: [] + size: 30456 + timestamp: 1769445263457 +- conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxrender-0.9.12-hb9d3cd8_0.conda + sha256: 044c7b3153c224c6cedd4484dd91b389d2d7fd9c776ad0f4a34f099b3389f4a1 + md5: 96d57aba173e878a2089d5638016dc5e + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=13 + - xorg-libx11 >=1.8.10,<2.0a0 + license: MIT + 
license_family: MIT + purls: [] + size: 33005 + timestamp: 1734229037766 +- conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxtst-1.2.5-hb9d3cd8_3.conda + sha256: 752fdaac5d58ed863bbf685bb6f98092fe1a488ea8ebb7ed7b606ccfce08637a + md5: 7bbe9a0cc0df0ac5f5a8ad6d6a11af2f + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=13 + - xorg-libx11 >=1.8.10,<2.0a0 + - xorg-libxext >=1.3.6,<2.0a0 + - xorg-libxi >=1.7.10,<2.0a0 + license: MIT + license_family: MIT + purls: [] + size: 32808 + timestamp: 1727964811275 +- conda: https://conda.anaconda.org/conda-forge/linux-64/xorg-libxxf86vm-1.1.7-hb03c661_0.conda + sha256: 64db17baaf36fa03ed8fae105e2e671a7383e22df4077486646f7dbf12842c9f + md5: 665d152b9c6e78da404086088077c844 + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=14 + - xorg-libx11 >=1.8.12,<2.0a0 + - xorg-libxext >=1.3.6,<2.0a0 + license: MIT + license_family: MIT + purls: [] + size: 18701 + timestamp: 1769434732453 +- pypi: git+https://github.com/Nikea/xray-vision?rev=master#fc01c7bf7ca25fc2e824ed6f16481b4fa78a79a6 + name: xray-vision + version: 0.1.1 + requires_dist: + - matplotlib + - numpy + - pandas + - pyqt5 + - scipy + - six +- conda: https://conda.anaconda.org/conda-forge/noarch/xyzservices-2026.3.0-pyhd8ed1ab_0.conda + sha256: 663ea9b00d68c2da309114923924686ab6d3f59ef1b196c5029ba16799e7bb07 + md5: 4487b9c371d0161d54b5c7bbd890c0fc + depends: + - python >=3.9 + license: BSD-3-Clause + license_family: BSD + purls: + - pkg:pypi/xyzservices?source=hash-mapping + size: 51732 + timestamp: 1774900074457 +- conda: https://conda.anaconda.org/conda-forge/linux-64/yaml-0.2.5-h280c20c_3.conda + sha256: 6d9ea2f731e284e9316d95fa61869fe7bbba33df7929f82693c121022810f4ad + md5: a77f85f77be52ff59391544bfe73390a + depends: + - libgcc >=14 + - __glibc >=2.17,<3.0.a0 + license: MIT + license_family: MIT + purls: [] + size: 85189 + timestamp: 1753484064210 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/yaml-0.2.5-h925e9cb_3.conda + sha256: 
b03433b13d89f5567e828ea9f1a7d5c5d697bf374c28a4168d71e9464f5dafac + md5: 78a0fe9e9c50d2c381e8ee47e3ea437d + depends: + - __osx >=11.0 + license: MIT + license_family: MIT + purls: [] + size: 83386 + timestamp: 1753484079473 +- conda: https://conda.anaconda.org/conda-forge/noarch/zarr-2.18.3-pyhd8ed1ab_1.conda + sha256: 02c045d3ab97bd5a713b0f35b05f017603d33bd728694ce3cf843c45c2906535 + md5: 3e9a0fee25417c432c4780b9597fc312 + depends: + - asciitree + - fasteners + - numcodecs >=0.10.0,<0.16.0a0 + - numpy >=1.24,<3.0 + - python >=3.10 + constrains: + - notebook + - ipytree >=0.2.2 + - ipywidgets >=8.0.0 + license: MIT + license_family: MIT + purls: + - pkg:pypi/zarr?source=hash-mapping + size: 160013 + timestamp: 1733237313723 +- conda: https://conda.anaconda.org/conda-forge/linux-64/zfp-1.0.1-h909a3a2_5.conda + sha256: 5fabe6cccbafc1193038862b0b0d784df3dae84bc48f12cac268479935f9c8b7 + md5: 6a0eb48e58684cca4d7acc8b7a0fd3c7 + depends: + - __glibc >=2.17,<3.0.a0 + - _openmp_mutex >=4.5 + - libgcc >=14 + - libstdcxx >=14 + license: BSD-3-Clause + license_family: BSD + purls: [] + size: 277694 + timestamp: 1766549572069 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/zfp-1.0.1-ha86207d_5.conda + sha256: 5b8bc86ca206f456ca9fe9e1a629f68b949ac47070211bccf4b44d29141c85d7 + md5: 581bd74656ccd460cf2bbe152292a1eb + depends: + - __osx >=11.0 + - libcxx >=19 + - llvm-openmp >=19.1.7 + license: BSD-3-Clause + license_family: BSD + purls: [] + size: 204043 + timestamp: 1766549790975 +- conda: https://conda.anaconda.org/conda-forge/noarch/zict-3.0.0-pyhd8ed1ab_1.conda + sha256: 5488542dceeb9f2874e726646548ecc5608060934d6f9ceaa7c6a48c61f9cc8d + md5: e52c2ef711ccf31bb7f70ca87d144b9e + depends: + - python >=3.9 + license: BSD-3-Clause + license_family: BSD + purls: + - pkg:pypi/zict?source=hash-mapping + size: 36341 + timestamp: 1733261642963 +- conda: https://conda.anaconda.org/conda-forge/noarch/zipp-3.23.1-pyhcf101f3_0.conda + sha256: 
523616c0530d305d2216c2b4a8dfd3872628b60083255b89c5e0d8c42e738cca + md5: e1c36c6121a7c9c76f2f148f1e83b983 + depends: + - python >=3.10 + - python + license: MIT + license_family: MIT + purls: + - pkg:pypi/zipp?source=compressed-mapping + size: 24461 + timestamp: 1776131454755 +- conda: https://conda.anaconda.org/conda-forge/linux-64/zlib-1.3.2-h25fd6f3_2.conda + sha256: 245c9ee8d688e23661b95e3c6dd7272ca936fabc03d423cdb3cdee1bbcf9f2f2 + md5: c2a01a08fc991620a74b32420e97868a + depends: + - __glibc >=2.17,<3.0.a0 + - libzlib 1.3.2 h25fd6f3_2 + license: Zlib + license_family: Other + purls: [] + size: 95931 + timestamp: 1774072620848 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/zlib-1.3.2-h8088a28_2.conda + sha256: 8dd2ac25f0ba714263aac5832d46985648f4bfb9b305b5021d702079badc08d2 + md5: f1c0bce276210bed45a04949cfe8dc20 + depends: + - __osx >=11.0 + - libzlib 1.3.2 h8088a28_2 + license: Zlib + license_family: Other + purls: [] + size: 81123 + timestamp: 1774072974535 +- conda: https://conda.anaconda.org/conda-forge/linux-64/zlib-ng-2.2.5-hde8ca8f_1.conda + sha256: 84ea17cb646d8a916d9335415f57c9e5dd001de158972322c714ebe1b72670b0 + md5: c860578a89dc9b6003d600181612287c + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=14 + - libstdcxx >=14 + license: Zlib + license_family: Other + purls: [] + size: 110969 + timestamp: 1764162891322 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/zlib-ng-2.2.5-h3470cca_1.conda + sha256: 90aaf9d2aab71610607e48b5d8110147d71b5316ebff28b18de8b460b9c92e83 + md5: c2a50447e98f4b6b1357f54bbd9379dd + depends: + - __osx >=11.0 + - libcxx >=19 + license: Zlib + license_family: Other + purls: [] + size: 87305 + timestamp: 1764163041065 +- conda: https://conda.anaconda.org/conda-forge/linux-64/zstandard-0.25.0-py310h139afa4_1.conda + sha256: b0103e8bb639dbc6b9de8ef9a18a06b403b687a33dec83c25bd003190942259a + md5: 3741aefc198dfed2e3c9adc79d706bb7 + depends: + - python + - cffi >=1.11 + - zstd >=1.5.7,<1.5.8.0a0 + - libgcc >=14 + - 
__glibc >=2.17,<3.0.a0 + - zstd >=1.5.7,<1.6.0a0 + - python_abi 3.10.* *_cp310 + license: BSD-3-Clause + license_family: BSD + purls: + - pkg:pypi/zstandard?source=hash-mapping + size: 455614 + timestamp: 1762512676430 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/zstandard-0.25.0-py310hf151d32_1.conda + sha256: be3cd0e825959c8401f083319ef97892115e4dfddb661da088648e442d25008a + md5: 82044ec889ec97e36401ef1fc40b05fc + depends: + - python + - cffi >=1.11 + - zstd >=1.5.7,<1.5.8.0a0 + - python 3.10.* *_cpython + - __osx >=11.0 + - python_abi 3.10.* *_cp310 + - zstd >=1.5.7,<1.6.0a0 + license: BSD-3-Clause + license_family: BSD + purls: + - pkg:pypi/zstandard?source=hash-mapping + size: 377436 + timestamp: 1762512758954 +- conda: https://conda.anaconda.org/conda-forge/linux-64/zstd-1.5.7-hb78ec9c_6.conda + sha256: 68f0206ca6e98fea941e5717cec780ed2873ffabc0e1ed34428c061e2c6268c7 + md5: 4a13eeac0b5c8e5b8ab496e6c4ddd829 + depends: + - __glibc >=2.17,<3.0.a0 + - libzlib >=1.3.1,<2.0a0 + license: BSD-3-Clause + license_family: BSD + purls: [] + size: 601375 + timestamp: 1764777111296 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/zstd-1.5.7-hbf9d68e_6.conda + sha256: 9485ba49e8f47d2b597dd399e88f4802e100851b27c21d7525625b0b4025a5d9 + md5: ab136e4c34e97f34fb621d2592a393d8 + depends: + - __osx >=11.0 + - libzlib >=1.3.1,<2.0a0 + license: BSD-3-Clause + license_family: BSD + purls: [] + size: 433413 + timestamp: 1764777166076 diff --git a/pixi.toml b/pixi.toml new file mode 100644 index 0000000..3855f36 --- /dev/null +++ b/pixi.toml @@ -0,0 +1,52 @@ +[project] +name = "pyCHX" +channels = ["conda-forge"] +platforms = ["osx-arm64", "linux-64"] + +[dependencies] +python = "3.10.*" +pip = "*" +cython = "*" +dask = "*" +databroker = "*" +dill = "*" +historydict = "*" +ipython = "*" +lmfit = "*" +matplotlib = "*" +numpy = "*" +pandas = "*" +pillow = "*" +pyyaml = "*" +reportlab = "*" +scikit-image = "*" +scipy = "*" +tifffile = "*" +tqdm = "*" + 
+[pypi-dependencies] +scikit-beam = { git = "https://github.com/scikit-beam/scikit-beam", rev = "main" } +eiger-io = {git = "https://github.com/NSLS-II-CHX/eiger-io", rev="master"} +chxtools = {git = "https://github.com/NSLS2/chxtools", rev = "main"} +xray-vision = {git = "https://github.com/Nikea/xray-vision", rev="master"} +modestimage = {git = "https://github.com/ChrisBeaumont/mpl-modest-image", rev="master"} + +[feature.dev.dependencies] +black = "*" +codecov = "*" +coverage = "*" +flake8 = "*" +isort = "*" +nbstripout = "*" +pre-commit = "*" +pre-commit-hooks = "*" +pytest = "*" +sphinx = "*" +twine = "*" +numpydoc = "*" +sphinx-copybutton = "*" +sphinx_rtd_theme = "*" + +[environments] +dev = ["dev"] + diff --git a/pyproject.toml b/pyproject.toml index 3239179..3791f85 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -17,4 +17,4 @@ exclude = ''' | blib2to3 | tests/data )/ -''' +''' \ No newline at end of file From 227f9c91e6198f43c1f346669e2b512a48e4f4fc Mon Sep 17 00:00:00 2001 From: jennmald Date: Tue, 14 Apr 2026 15:28:02 -0400 Subject: [PATCH 2/7] pre-commit fix --- pixi.toml | 1 - pyCHX/tests/test_simpleimport.py | 2 +- pyproject.toml | 2 +- 3 files changed, 2 insertions(+), 3 deletions(-) diff --git a/pixi.toml b/pixi.toml index 3855f36..f6a3480 100644 --- a/pixi.toml +++ b/pixi.toml @@ -49,4 +49,3 @@ sphinx_rtd_theme = "*" [environments] dev = ["dev"] - diff --git a/pyCHX/tests/test_simpleimport.py b/pyCHX/tests/test_simpleimport.py index 14d4729..09c1ff1 100644 --- a/pyCHX/tests/test_simpleimport.py +++ b/pyCHX/tests/test_simpleimport.py @@ -1,3 +1,3 @@ def test_import(): "Check that the main pyCHX imports work" - from pyCHX.chx_packages import * + from pyCHX.chx_packages import * diff --git a/pyproject.toml b/pyproject.toml index 3791f85..3239179 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -17,4 +17,4 @@ exclude = ''' | blib2to3 | tests/data )/ -''' \ No newline at end of file +''' From 54e7bb450fed208cac9ad56d428ed3f57d599991 Mon Sep 17 
00:00:00 2001 From: jennmald Date: Wed, 15 Apr 2026 10:45:00 -0400 Subject: [PATCH 3/7] first round of pre-commit changes with the new config.yaml file --- .github/workflows/docs.yml | 2 +- .github/workflows/publish-pypi.yml | 40 +- .gitignore | 2 +- .pre-commit-config.yaml | 101 +- README.md | 11 +- RELEASE.md | 15 +- chx_packages_local.py | 278 +---- pyCHX/Badpixels.py | 4 +- pyCHX/Compress_readerNew.py | 16 +- pyCHX/Create_Report.py | 423 +++++-- pyCHX/DEVs.py | 82 +- pyCHX/DataGonio.py | 174 ++- pyCHX/SAXS.py | 127 +- pyCHX/Stitching.py | 65 +- pyCHX/Two_Time_Correlation_Function.py | 223 +++- pyCHX/XPCS_GiSAXS.py | 499 ++++++-- pyCHX/XPCS_SAXS.py | 432 +++++-- pyCHX/XPCS_XSVS_SAXS_Multi_2017_V4.py | 124 +- pyCHX/_version.py | 44 +- pyCHX/chx_Fitters2D.py | 35 +- pyCHX/chx_compress.py | 144 ++- pyCHX/chx_compress_analysis.py | 105 +- pyCHX/chx_correlation.py | 72 +- pyCHX/chx_correlationc.py | 206 ++- pyCHX/chx_correlationp.py | 200 ++- pyCHX/chx_correlationp2.py | 135 +- pyCHX/chx_crosscor.py | 94 +- pyCHX/chx_generic_functions.py | 1104 +++++++++++++---- pyCHX/chx_handlers.py | 1 - pyCHX/chx_libs.py | 72 +- pyCHX/chx_olog.py | 7 +- pyCHX/chx_outlier_detection.py | 19 +- pyCHX/chx_packages.py | 256 +--- pyCHX/chx_speckle.py | 156 ++- pyCHX/chx_specklecp.py | 220 +++- pyCHX/chx_xpcs_xsvs_jupyter_V1.py | 521 ++++++-- pyCHX/movie_maker.py | 45 +- pyCHX/v2/_commonspeckle/DEVs.py | 67 +- pyCHX/v2/_commonspeckle/DataGonio.py | 73 +- pyCHX/v2/_commonspeckle/SAXS.py | 70 +- pyCHX/v2/_commonspeckle/Stitching.py | 58 +- .../Two_Time_Correlation_Function.py | 102 +- pyCHX/v2/_commonspeckle/XPCS_GiSAXS.py | 499 ++++++-- pyCHX/v2/_commonspeckle/XPCS_SAXS.py | 421 +++++-- .../XPCS_XSVS_SAXS_Multi_2017_V4.py | 51 +- pyCHX/v2/_commonspeckle/chx_Fitters2D.py | 32 +- pyCHX/v2/_commonspeckle/chx_compress.py | 127 +- .../_commonspeckle/chx_compress_analysis.py | 54 +- pyCHX/v2/_commonspeckle/chx_correlation.py | 53 +- pyCHX/v2/_commonspeckle/chx_correlationc.py | 112 +- 
pyCHX/v2/_commonspeckle/chx_correlationp.py | 108 +- pyCHX/v2/_commonspeckle/chx_correlationp2.py | 73 +- .../_commonspeckle/chx_generic_functions.py | 636 +++++++--- pyCHX/v2/_commonspeckle/chx_handlers.py | 1 - pyCHX/v2/_commonspeckle/chx_libs.py | 54 +- pyCHX/v2/_commonspeckle/chx_olog.py | 7 +- pyCHX/v2/_commonspeckle/chx_speckle.py | 56 +- pyCHX/v2/_commonspeckle/chx_specklecp.py | 104 +- .../chx_xpcs_xsvs_jupyter_V1.py | 207 +++- pyCHX/v2/_commonspeckle/movie_maker.py | 12 +- pyCHX/v2/_commonspeckle/xpcs_timepixel.py | 50 +- pyCHX/v2/_futurepyCHX/Badpixels.py | 5 +- pyCHX/v2/_futurepyCHX/Compress_readerNew.py | 16 +- pyCHX/v2/_futurepyCHX/Create_Report.py | 157 ++- pyCHX/v2/_futurepyCHX/DEVs.py | 67 +- pyCHX/v2/_futurepyCHX/DataGonio.py | 69 +- pyCHX/v2/_futurepyCHX/SAXS.py | 64 +- pyCHX/v2/_futurepyCHX/Stitching.py | 49 +- .../Two_Time_Correlation_Function.py | 90 +- pyCHX/v2/_futurepyCHX/XPCS_GiSAXS.py | 499 ++++++-- pyCHX/v2/_futurepyCHX/XPCS_SAXS.py | 419 +++++-- .../XPCS_XSVS_SAXS_Multi_2017_V4.py | 49 +- pyCHX/v2/_futurepyCHX/chx_Fitters2D.py | 32 +- pyCHX/v2/_futurepyCHX/chx_compress.py | 122 +- .../v2/_futurepyCHX/chx_compress_analysis.py | 55 +- pyCHX/v2/_futurepyCHX/chx_correlation.py | 53 +- pyCHX/v2/_futurepyCHX/chx_correlationc.py | 112 +- pyCHX/v2/_futurepyCHX/chx_correlationp.py | 91 +- pyCHX/v2/_futurepyCHX/chx_correlationp2.py | 56 +- pyCHX/v2/_futurepyCHX/chx_crosscor.py | 80 +- .../v2/_futurepyCHX/chx_generic_functions.py | 636 +++++++--- pyCHX/v2/_futurepyCHX/chx_handlers.py | 1 - pyCHX/v2/_futurepyCHX/chx_libs.py | 57 +- pyCHX/v2/_futurepyCHX/chx_olog.py | 7 +- pyCHX/v2/_futurepyCHX/chx_packages.py | 250 +--- pyCHX/v2/_futurepyCHX/chx_speckle.py | 56 +- pyCHX/v2/_futurepyCHX/chx_specklecp.py | 103 +- .../_futurepyCHX/chx_xpcs_xsvs_jupyter_V1.py | 202 ++- pyCHX/v2/_futurepyCHX/movie_maker.py | 12 +- pyCHX/v2/_futurepyCHX/xpcs_timepixel.py | 46 +- pyCHX/xpcs_timepixel.py | 119 +- requirements-dev.txt | 10 +- requirements.txt | 10 +- 
run_tests.py | 1 - setup.py | 4 +- test-requirements.txt | 2 +- versioneer.py | 78 +- 97 files changed, 8630 insertions(+), 4130 deletions(-) diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml index 1783664..6fbf96b 100644 --- a/.github/workflows/docs.yml +++ b/.github/workflows/docs.yml @@ -25,7 +25,7 @@ jobs: - name: Checkout the code uses: actions/checkout@v3 with: - fetch-depth: 1000 # should be enough to reach the most recent tag + fetch-depth: 1000 # should be enough to reach the most recent tag - name: Set up Python ${{ matrix.python-version }} uses: actions/setup-python@v4 diff --git a/.github/workflows/publish-pypi.yml b/.github/workflows/publish-pypi.yml index 2413f97..f80c18c 100644 --- a/.github/workflows/publish-pypi.yml +++ b/.github/workflows/publish-pypi.yml @@ -14,26 +14,26 @@ jobs: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 - with: - fetch-depth: 0 + - uses: actions/checkout@v3 + with: + fetch-depth: 0 - - name: Set up Python - uses: actions/setup-python@v4 - with: - python-version: '3.x' + - name: Set up Python + uses: actions/setup-python@v4 + with: + python-version: "3.x" - - name: Install dependencies - run: | - python -m pip install --upgrade pip - pip install wheel twine setuptools + - name: Install dependencies + run: | + python -m pip install --upgrade pip + pip install wheel twine setuptools - - name: Build and publish - env: - TWINE_USERNAME: __token__ - # The PYPI_PASSWORD must be a pypi token with the "pypi-" prefix with sufficient permissions to upload this package - # https://pypi.org/help/#apitoken - TWINE_PASSWORD: ${{ secrets.PYPI_API_TOKEN }} - run: | - python setup.py sdist bdist_wheel - twine upload dist/* + - name: Build and publish + env: + TWINE_USERNAME: __token__ + # The PYPI_PASSWORD must be a pypi token with the "pypi-" prefix with sufficient permissions to upload this package + # https://pypi.org/help/#apitoken + TWINE_PASSWORD: ${{ secrets.PYPI_API_TOKEN }} + run: | + python setup.py 
sdist bdist_wheel + twine upload dist/* diff --git a/.gitignore b/.gitignore index 93b82de..7930927 100755 --- a/.gitignore +++ b/.gitignore @@ -90,7 +90,7 @@ docs/_build/ *.tiff *.tif -#generated documntation files +#generated documentation files doc/resource/api/generated/ # Enaml diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 9b86b19..c449a03 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,23 +1,96 @@ -exclude: '(v2)/.*' -default_language_version: - python: python3 +ci: + autoupdate_commit_msg: "chore(deps): update pre-commit hooks" + autofix_commit_msg: "style: pre-commit fixes" + autoupdate_schedule: "quarterly" + +exclude: "^({{cookiecutter\\.project_name}}|hooks/pre_gen_project.py$)" + repos: - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v4.6.0 + rev: "v6.0.0" hooks: + - id: check-added-large-files + - id: check-case-conflict + - id: check-merge-conflict + - id: check-symlinks - id: check-yaml + - id: debug-statements - id: end-of-file-fixer + - id: mixed-line-ending + - id: name-tests-test + args: ["--pytest-test-first"] + - id: requirements-txt-fixer - id: trailing-whitespace - - repo: https://github.com/ambv/black - rev: 24.4.2 + + - repo: https://github.com/adamchainz/blacken-docs + rev: "1.20.0" hooks: - - id: black - - repo: https://github.com/pycqa/isort - rev: 5.13.2 + - id: blacken-docs + additional_dependencies: [black==24.*] + + - repo: https://github.com/astral-sh/ruff-pre-commit + rev: "v0.15.6" hooks: - - id: isort - args: ["--profile", "black"] - - repo: https://github.com/kynan/nbstripout - rev: 0.7.1 + - id: ruff-check + args: ["--fix", "--show-fixes"] + - id: ruff-format + + - repo: https://github.com/pre-commit/pygrep-hooks + rev: "v1.10.0" hooks: - - id: nbstripout + - id: rst-backticks + - id: rst-directive-colons + - id: rst-inline-touching-normal + + - repo: https://github.com/pre-commit/mirrors-mypy + rev: "v1.19.1" + hooks: + - id: mypy + files: "(src|tests|noxfile.py)" + 
args: [] + additional_dependencies: + - click + - markdown-it-py + - pytest + - nox + - orjson + - repo-review>=0.10.6 + - rich + - tomli>=2.0.2 + - types-PyYAML + + - repo: https://github.com/rbubley/mirrors-prettier + rev: "v3.8.1" + hooks: + - id: prettier + types_or: [yaml, markdown, html, css, scss, javascript, json] + args: [--prose-wrap=always] + + - repo: https://github.com/crate-ci/typos + rev: "v1.44.0" + hooks: + - id: typos + exclude: ^Gemfile\.lock$ + + - repo: local + hooks: + - id: disallow-caps + name: Disallow improper capitalization + language: pygrep + entry: PyBind|Numpy|Cmake|CCache|Github|PyTest|RST|PyLint + exclude: (.pre-commit-config.yaml|docs/pages/guides/style\.md)$ + - id: disallow-words + name: Disallow certain words + language: pygrep + entry: "[Ff]alsey" + exclude: .pre-commit-config.yaml$ + - id: disallow-bad-permalinks + name: Disallow _ in permalinks + language: pygrep + entry: "^permalink:.*_.*" + - id: cog + name: Cog the pages + language: python + entry: cog -P -r -I ./helpers + files: "^docs/pages/guides/(packaging_compiled|docs|tasks|gha_basic).md|^copier.yml|^docs/_includes/pyproject.md" + additional_dependencies: [cogapp, cookiecutter, tomlkit] diff --git a/README.md b/README.md index f41cbac..aa9931c 100644 --- a/README.md +++ b/README.md @@ -1,12 +1,13 @@ -pyCHX -- CHX XPCS Data Analysis Packages -======== +# pyCHX -- CHX XPCS Data Analysis Packages -Repository for data collection and analysis scripts that are useful at the -CHX beamline at NSLS-II (11-ID) developed by Dr. Yugang Zhang (yuzhang@bnl.gov). +Repository for data collection and analysis scripts that are useful at the CHX +beamline at NSLS-II (11-ID) developed by Dr. Yugang Zhang (yuzhang@bnl.gov). Installation instructions on Linux: -Install miniconda from https://conda.io/miniconda.html. Then create the environment for pyCHX: +Install miniconda from https://conda.io/miniconda.html. 
Then create the +environment for pyCHX: + ``` conda create --name pyCHX python=3.6 numpy scipy matplotlib source activate pyCHX diff --git a/RELEASE.md b/RELEASE.md index d365a2a..91921fc 100644 --- a/RELEASE.md +++ b/RELEASE.md @@ -1,10 +1,9 @@ Steps for releasing a version of pyCHX - -* Make sure that you have an up-to-date copy of the master branch. -* Make an empty commit to serve as a marker in the history. This is technically - optional, but it is nice to do. ``git commit --allow-empty -m "REL: v0.0.1"`` -* Now make the tag. This should never be delete, so make sure you are certain. - ``git tag v0.0.1`` -* Push the commit up to github: ``git push upstream master`` -* Push the tag also: ``git push upstream v0.0.1`` +- Make sure that you have an up-to-date copy of the master branch. +- Make an empty commit to serve as a marker in the history. This is technically + optional, but it is nice to do. `git commit --allow-empty -m "REL: v0.0.1"` +- Now make the tag. This should never be delete, so make sure you are certain. 
+ `git tag v0.0.1` +- Push the commit up to github: `git push upstream master` +- Push the tag also: `git push upstream v0.0.1` diff --git a/chx_packages_local.py b/chx_packages_local.py index 6fdf903..f59bb54 100644 --- a/chx_packages_local.py +++ b/chx_packages_local.py @@ -1,320 +1,50 @@ ### This enables local import of pyCHX for testing -import pickle as cpk - -import historydict # from pyCHX.chx_handlers import use_dask, use_pims -from chx_handlers import use_dask, use_pims +from chx_handlers import use_pims # from pyCHX.chx_libs import ( from chx_libs import ( - EigerHandler, - Javascript, - LogNorm, - Model, - cmap_albula, - cmap_vge, - datetime, db, - getpass, - h5py, - multi_tau_lags, - np, - os, - pims, - plt, - random, - roi, - time, - tqdm, - utils, - warnings, ) -from eiger_io.fs_handler import EigerImages -from skimage.draw import line, line_aa, polygon # changes to current version of chx_packages.py # added load_dask_data in generic_functions -use_pims(db) # use pims for importing eiger data, register_handler 'AD_EIGER2' and 'AD_EIGER' +use_pims( + db +) # use pims for importing eiger data, register_handler 'AD_EIGER2' and 'AD_EIGER' # from pyCHX.chx_compress import ( -from chx_compress import ( - MultifileBNLCustom, - combine_binary_files, - compress_eigerdata, - create_compress_header, - get_eigerImage_per_file, - init_compress_eigerdata, - para_compress_eigerdata, - para_segment_compress_eigerdata, - read_compressed_eigerdata, - segment_compress_eigerdata, -) # from pyCHX.chx_compress_analysis import ( -from chx_compress_analysis import ( - Multifile, - cal_each_ring_mean_intensityc, - cal_waterfallc, - compress_eigerdata, - get_avg_imgc, - get_each_frame_intensityc, - get_each_ring_mean_intensityc, - get_time_edge_avg_img, - mean_intensityc, - plot_each_ring_mean_intensityc, - plot_waterfallc, - read_compressed_eigerdata, -) # from pyCHX.chx_correlationc import Get_Pixel_Arrayc, auto_two_Arrayc, cal_g2c, get_pixelist_interp_iq -from 
chx_correlationc import Get_Pixel_Arrayc, auto_two_Arrayc, cal_g2c, get_pixelist_interp_iq # from pyCHX.chx_correlationp import _one_time_process_errorp, auto_two_Arrayp, cal_g2p, cal_GPF, get_g2_from_ROI_GPF -from chx_correlationp import _one_time_process_errorp, auto_two_Arrayp, cal_g2p, cal_GPF, get_g2_from_ROI_GPF # from pyCHX.chx_crosscor import CrossCorrelator2, run_para_ccorr_sym -from chx_crosscor import CrossCorrelator2, run_para_ccorr_sym # from pyCHX.chx_generic_functions import ( -from chx_generic_functions import ( - R_2, - RemoveHot, - apply_mask, - average_array_withNan, - check_bad_uids, - check_lost_metadata, - check_ROI_intensity, - check_shutter_open, - combine_images, - copy_data, - create_cross_mask, - create_fullImg_with_box, - create_hot_pixel_mask, - create_multi_rotated_rectangle_mask, - create_polygon_mask, - create_rectangle_mask, - create_ring_mask, - create_seg_ring, - create_time_slice, - create_user_folder, - delete_data, - extract_data_from_file, - filter_roi_mask, - find_bad_pixels, - find_bad_pixels_FD, - find_good_xpcs_uids, - find_index, - find_uids, - fit_one_peak_curve, - get_averaged_data_from_multi_res, - get_avg_img, - get_bad_frame_list, - get_base_all_filenames, - get_cross_point, - get_current_pipeline_filename, - get_current_pipeline_fullpath, - get_curve_turning_points, - get_detector, - get_detectors, - get_each_frame_intensity, - get_echos, - get_eigerImage_per_file, - get_fit_by_two_linear, - get_fra_num_by_dose, - get_g2_fit_general, - get_image_edge, - get_image_with_roi, - get_img_from_iq, - get_last_uids, - get_mass_center_one_roi, - get_max_countc, - get_meta_data, - get_multi_tau_lag_steps, - get_non_uniform_edges, - get_print_uids, - get_q_rate_fit_general, - get_qval_dict, - get_qval_qwid_dict, - get_roi_mask_qval_qwid_by_shift, - get_roi_nr, - get_series_g2_taus, - get_SG_norm, - get_sid_filenames, - get_today_date, - get_touched_qwidth, - get_waxs_beam_center, - lin2log_g2, - linear_fit, - load_dask_data, - 
load_data, - load_mask, - load_pilatus, - ls_dir, - mask_badpixels, - mask_exclude_badpixel, - move_beamstop, - pad_length, - pload_obj, - plot1D, - plot_fit_two_linear_fit, - plot_g2_general, - plot_q_g2fitpara_general, - plot_q_rate_fit_general, - plot_q_rate_general, - plot_xy_with_fit, - plot_xy_x2, - print_dict, - psave_obj, - read_dict_csv, - refine_roi_mask, - reverse_updown, - ring_edges, - run_time, - save_array_to_tiff, - save_arrays, - save_current_pipeline, - save_dict_csv, - save_g2_fit_para_tocsv, - save_g2_general, - save_lists, - save_oavs_tifs, - sgolay2d, - shift_mask, - show_img, - show_ROI_on_image, - shrink_image, - trans_data_to_pd, - update_qval_dict, - update_roi_mask, - validate_uid, -) # from pyCHX.chx_olog import Attachment, LogEntry, update_olog_id, update_olog_uid, update_olog_uid_with_file -from chx_olog import Attachment, LogEntry, update_olog_id, update_olog_uid, update_olog_uid_with_file # from pyCHX.chx_outlier_detection import ( -from chx_outlier_detection import is_outlier, outlier_mask # from pyCHX.chx_specklecp import ( -from chx_specklecp import ( - get_binned_his_std, - get_contrast, - get_his_std_from_pds, - get_xsvs_fit, - plot_g2_contrast, - plot_xsvs_fit, - save_bin_his_std, - save_KM, - xsvsc, - xsvsp, -) # from pyCH.chx_xpcs_xsvs_jupyter_V1 import( -from chx_xpcs_xsvs_jupyter_V1 import ( - compress_multi_uids, - do_compress_on_line, - get_fra_num_by_dose, - get_iq_from_uids, - get_series_g2_from_g12, - get_series_one_time_mulit_uids, - get_t_iqc_uids, - get_two_time_mulit_uids, - get_uids_by_range, - get_uids_in_time_period, - plot_dose_g2, - plot_entries_from_csvlist, - plot_entries_from_uids, - plot_t_iqc_uids, - plot_t_iqtMq2, - realtime_xpcs_analysis, - run_xpcs_xsvs_single, - wait_data_acquistion_finish, - wait_func, -) # from pyCHX.Create_Report import ( -from Create_Report import ( - create_multi_pdf_reports_for_uids, - create_one_pdf_reports_for_uids, - create_pdf_report, - export_xpcs_results_to_h5, - 
extract_xpcs_results_from_h5, - make_pdf_report, -) # from pyCHX.DataGonio import qphiavg -from DataGonio import qphiavg # from pyCHX.SAXS import ( -from SAXS import ( - fit_form_factor, - fit_form_factor2, - form_factor_residuals_bg_lmfit, - form_factor_residuals_lmfit, - get_form_factor_fit_lmfit, - poly_sphere_form_factor_intensity, - show_saxs_qmap, -) # from pyCHX.Two_Time_Correlation_Function import ( -from Two_Time_Correlation_Function import ( - get_aged_g2_from_g12, - get_aged_g2_from_g12q, - get_four_time_from_two_time, - get_one_time_from_two_time, - rotate_g12q_to_rectangle, - show_C12, -) # from pyCHX.XPCS_GiSAXS import ( -from XPCS_GiSAXS import ( - cal_1d_qr, - convert_gisaxs_pixel_to_q, - fit_qr_qz_rate, - get_1d_qr, - get_each_box_mean_intensity, - get_gisaxs_roi, - get_qedge, - get_qmap_label, - get_qr_tick_label, - get_qzr_map, - get_qzrmap, - get_reflected_angles, - get_t_qrc, - multi_uids_gisaxs_xpcs_analysis, - plot_gisaxs_g4, - plot_gisaxs_two_g2, - plot_qr_1d_with_ROI, - plot_qrt_pds, - plot_qzr_map, - plot_t_qrc, - show_qzr_map, - show_qzr_roi, -) # from pyCHX.XPCS_SAXS import ( -from XPCS_SAXS import ( - cal_g2, - combine_two_roi_mask, - create_hot_pixel_mask, - get_angular_mask, - get_circular_average, - get_cirucular_average_std, - get_each_ring_mean_intensity, - get_QrQw_From_RoiMask, - get_ring_mask, - get_seg_from_ring_mask, - get_t_iq, - get_t_iqc, - get_t_iqc_imstack, - multi_uids_saxs_xpcs_analysis, - plot_circular_average, - plot_qIq_with_ROI, - plot_t_iqc, - recover_img_from_iq, - save_lists, -) diff --git a/pyCHX/Badpixels.py b/pyCHX/Badpixels.py index 7b7dc5b..ac619e7 100644 --- a/pyCHX/Badpixels.py +++ b/pyCHX/Badpixels.py @@ -90,7 +90,9 @@ 4155535, ] ), # 57 points, coralpor - "6cc34a": np.array([1058942, 2105743, 2105744, 2107813, 2107815, 2109883, 4155535]), # coralpor + "6cc34a": np.array( + [1058942, 2105743, 2105744, 2107813, 2107815, 2109883, 4155535] + ), # coralpor } diff --git a/pyCHX/Compress_readerNew.py 
b/pyCHX/Compress_readerNew.py index 8d69158..6f83ee5 100644 --- a/pyCHX/Compress_readerNew.py +++ b/pyCHX/Compress_readerNew.py @@ -54,7 +54,7 @@ def __init__(self, filename, mode="rb", nbytes=2): numimgs: num images """ if mode != "rb" and mode != "wb": - raise ValueError("Error, mode must be 'rb' or 'wb'" "got : {}".format(mode)) + raise ValueError("Error, mode must be 'rb' or 'wb'got : {}".format(mode)) self._filename = filename self._mode = mode @@ -123,7 +123,9 @@ def index(self): def _read_header(self, n): """Read header from current seek position.""" if n > self.Nframes: - raise KeyError("Error, only {} frames, asked for {}".format(self.Nframes, n)) + raise KeyError( + "Error, only {} frames, asked for {}".format(self.Nframes, n) + ) # read in bytes cur = self.frame_indexes[n] header_raw = self._fd[cur : cur + self.HEADER_SIZE] @@ -146,7 +148,9 @@ def _read_raw(self, n): Reads from current cursor in file. """ if n > self.Nframes: - raise KeyError("Error, only {} frames, asked for {}".format(self.Nframes, n)) + raise KeyError( + "Error, only {} frames, asked for {}".format(self.Nframes, n) + ) cur = self.frame_indexes[n] + 1024 dlen = self._read_header(n)["dlen"] @@ -212,7 +216,7 @@ def __init__(self, filename, mode="rb"): raise ValueError("Write mode 'wb' not supported yet") if mode != "rb" and mode != "wb": - raise ValueError("Error, mode must be 'rb' or 'wb'" "got : {}".format(mode)) + raise ValueError("Error, mode must be 'rb' or 'wb'got : {}".format(mode)) self._filename = filename self._mode = mode @@ -307,7 +311,9 @@ def _read_raw(self, n): Reads from current cursor in file. 
""" if n > self.Nframes: - raise KeyError("Error, only {} frames, asked for {}".format(self.Nframes, n)) + raise KeyError( + "Error, only {} frames, asked for {}".format(self.Nframes, n) + ) # dlen is 4 bytes cur = self.frame_indexes[n] dlen = np.frombuffer(self._fd[cur : cur + 4], dtype=" %s & y -> %s| Shutter Mode: %s" - % (md["detector_distance"], md["feedback_x"], md["feedback_y"], md["shutter mode"]) + % ( + md["detector_distance"], + md["feedback_x"], + md["feedback_y"], + md["shutter mode"], + ) ) ####line 6 'Detector-Sample Distance.. if self.report_type == "saxs": - s7 = "Beam Center: [%s, %s] (pixel)" % (md["beam_center_x"], md["beam_center_y"]) + s7 = "Beam Center: [%s, %s] (pixel)" % ( + md["beam_center_x"], + md["beam_center_y"], + ) elif self.report_type == "gi_saxs": s7 = ( - "Incident Center: [%s, %s] (pixel)" % (md["beam_center_x"], md["beam_center_y"]) + "Incident Center: [%s, %s] (pixel)" + % (md["beam_center_x"], md["beam_center_y"]) + " || " - + "Reflect Center: [%s, %s] (pixel)" % (md["beam_refl_center_x"], md["beam_refl_center_y"]) + + "Reflect Center: [%s, %s] (pixel)" + % (md["beam_refl_center_x"], md["beam_refl_center_y"]) ) elif self.report_type == "ang_saxs" or self.report_type == "gi_waxs": - s7 = "Beam Center: [%s, %s] (pixel)" % (md["beam_center_x"], md["beam_center_y"]) + s7 = "Beam Center: [%s, %s] (pixel)" % ( + md["beam_center_x"], + md["beam_center_y"], + ) else: s7 = "" s7 += " || " + "BadLen: %s" % len(md["bad_frame_list"]) s7 += " || " + "Transmission: %s" % md["transmission"] s.append(s7) ####line 7 'Beam center... 
- m = "Mask file: %s" % md["mask_file"] + " || " + "ROI mask file: %s" % md["roi_mask_file"] + m = ( + "Mask file: %s" % md["mask_file"] + + " || " + + "ROI mask file: %s" % md["roi_mask_file"] + ) # s.append( 'Mask file: %s'%md['mask_file'] ) ####line 8 mask filename # s.append( ) ####line 8 mask filename s.append(m) @@ -552,9 +593,13 @@ def report_meta(self, top=740, new_page=False): self.data_dir_ = self.data_dir s.append("Analysis Results Dir: %s" % self.data_dir_) ####line 9 results folder - s.append("Metadata Dir: %s.csv-&.pkl" % self.metafile) ####line 10 metadata folder + s.append( + "Metadata Dir: %s.csv-&.pkl" % self.metafile + ) ####line 10 metadata folder try: - s.append("Pipeline notebook: %s" % md["NOTEBOOK_FULL_PATH"]) ####line 11 notebook folder + s.append( + "Pipeline notebook: %s" % md["NOTEBOOK_FULL_PATH"] + ) ####line 11 notebook folder except: pass # print( 'here' ) @@ -695,7 +740,11 @@ def report_ROI(self, top=300, new_page=False): ) # add q_Iq - if self.report_type == "saxs" or self.report_type == "gi_saxs" or self.report_type == "ang_saxs": + if ( + self.report_type == "saxs" + or self.report_type == "gi_saxs" + or self.report_type == "ang_saxs" + ): imgf = self.ROI_on_Iq_file img_height = 180 img_left, img_top = 320, top - ds @@ -737,7 +786,9 @@ def report_time_analysis(self, top=720, new_page=False): top1 = top ds = 20 self.sub_title_num += 1 - c.drawString(10, top, "%s. Time Dependent Plot" % self.sub_title_num) # add title + c.drawString( + 10, top, "%s. 
Time Dependent Plot" % self.sub_title_num + ) # add title c.setFont("Helvetica", 14) top = top1 - 160 @@ -759,7 +810,17 @@ def report_time_analysis(self, top=720, new_page=False): str2_left, str2_top = ipos, top - 5 add_image_string( - c, imgf, self.data_dir, img_left, img_top, img_height, str1_left, str1_top, str1, str2_left, str2_top + c, + imgf, + self.data_dir, + img_left, + img_top, + img_height, + str1_left, + str1_top, + str1, + str2_left, + str2_top, ) # plot iq~t @@ -898,7 +959,9 @@ def report_oavs(self, top=350, oavs_file=None, new_page=False): # print( imgf,self.data_dir ) print(img_width, img_height) - def report_one_time(self, top=350, g2_fit_file=None, q_rate_file=None, new_page=False): + def report_one_time( + self, top=350, g2_fit_file=None, q_rate_file=None, new_page=False + ): """create the one time correlation function report Two images: One Time Correlation Function with fit @@ -912,7 +975,9 @@ def report_one_time(self, top=350, g2_fit_file=None, q_rate_file=None, new_page= c.setFont("Helvetica", 20) ds = 20 self.sub_title_num += 1 - c.drawString(10, top, "%s. One Time Correlation Function" % self.sub_title_num) # add title + c.drawString( + 10, top, "%s. 
One Time Correlation Function" % self.sub_title_num + ) # add title c.setFont("Helvetica", 14) # add g2 plot if g2_fit_file is None: @@ -929,10 +994,9 @@ def report_one_time(self, top=350, g2_fit_file=None, q_rate_file=None, new_page= img_height = 550 top = top - 600 str2_left, str2_top = 80, top - 400 - # add one_time caculation + # add one_time calculation img_left, img_top = 1, top if self.g2_fit_new_page or self.g2_new_page: - img_height = 550 top = top - 250 str2_left, str2_top = 80, top - 0 @@ -1085,8 +1149,8 @@ def report_one_time(self, top=350, g2_fit_file=None, q_rate_file=None, new_page= c.showPage() c.save() - def report_mulit_one_time(self, top=720, new_page=False): - """create the mulit one time correlation function report + def report_multi_one_time(self, top=720, new_page=False): + """create the multi one time correlation function report Two images: One Time Correlation Function with fit q-rate fit @@ -1098,7 +1162,9 @@ def report_mulit_one_time(self, top=720, new_page=False): c.setFont("Helvetica", 20) ds = 20 self.sub_title_num += 1 - c.drawString(10, top, "%s. One Time Correlation Function" % self.sub_title_num) # add title + c.drawString( + 10, top, "%s. One Time Correlation Function" % self.sub_title_num + ) # add title c.setFont("Helvetica", 14) # add g2 plot top = top - 320 @@ -1128,7 +1194,9 @@ def report_mulit_one_time(self, top=720, new_page=False): im = Image.open(image) ratio = float(im.size[1]) / im.size[0] height = 180 - c.drawImage(image, 350, top, width=height / ratio, height=height, mask="auto") + c.drawImage( + image, 350, top, width=height / ratio, height=height, mask="auto" + ) c.setFont("Helvetica", 16) c.setFillColor(blue) @@ -1154,7 +1222,9 @@ def report_two_time(self, top=720, new_page=False): ds = 20 self.sub_title_num += 1 - c.drawString(10, top, "%s. Two Time Correlation Function" % self.sub_title_num) # add title + c.drawString( + 10, top, "%s. 
Two Time Correlation Function" % self.sub_title_num + ) # add title c.setFont("Helvetica", 14) top1 = top @@ -1186,7 +1256,6 @@ def report_two_time(self, top=720, new_page=False): imgf = self.two_g2_file if True: # not self.two_g2_new_page: - img_height = 300 img_left, img_top = 100 - 70, top str1_left, str1_top, str1 = 210 - 70, top + 310, "compared g2" @@ -1217,7 +1286,11 @@ def report_two_time(self, top=720, new_page=False): img_height = 140 img_left, img_top = 350, top + 30 str2_left, str2_top = 380 - 80, top - 5 - str1_left, str1_top, str1 = 450 - 80, top + 230, "q-rate fit from two-time" + str1_left, str1_top, str1 = ( + 450 - 80, + top + 230, + "q-rate fit from two-time", + ) else: img_height = 90 @@ -1257,7 +1330,9 @@ def report_four_time(self, top=720, new_page=False): ds = 20 self.sub_title_num += 1 - c.drawString(10, top, "%s. Four Time Correlation Function" % self.sub_title_num) # add title + c.drawString( + 10, top, "%s. Four Time Correlation Function" % self.sub_title_num + ) # add title c.setFont("Helvetica", 14) top1 = top @@ -1274,11 +1349,25 @@ def report_four_time(self, top=720, new_page=False): img_height = 600 top -= 300 img_left, img_top = 80, top - str1_left, str1_top, str1 = 180, top + 300 - 250, "four time correlation function" + str1_left, str1_top, str1 = ( + 180, + top + 300 - 250, + "four time correlation function", + ) str2_left, str2_top = 180, top - 10 add_image_string( - c, imgf, self.data_dir, img_left, img_top, img_height, str1_left, str1_top, str1, str2_left, str2_top + c, + imgf, + self.data_dir, + img_left, + img_top, + img_height, + str1_left, + str1_top, + str1, + str2_left, + str2_top, ) if new_page: @@ -1321,7 +1410,17 @@ def report_dose(self, top=720, new_page=False): img_left, img_top = 100, top add_image_string( - c, imgf, self.data_dir, img_left, img_top, img_height, str1_left, str1_top, str1, str2_left, str2_top + c, + imgf, + self.data_dir, + img_left, + img_top, + img_height, + str1_left, + str1_top, + str1, + 
str2_left, + str2_top, ) if new_page: @@ -1341,7 +1440,9 @@ def report_flow_pv_g2(self, top=720, new_page=False): ds = 20 self.sub_title_num += 1 - c.drawString(10, top, "%s. Flow One Time Analysis" % self.sub_title_num) # add title + c.drawString( + 10, top, "%s. Flow One Time Analysis" % self.sub_title_num + ) # add title c.setFont("Helvetica", 14) top1 = top @@ -1356,7 +1457,17 @@ def report_flow_pv_g2(self, top=720, new_page=False): str1_left, str1_top, str1 = 210, top + 300, "XPCS Vertical Flow" str2_left, str2_top = 180, top - 10 add_image_string( - c, imgf, self.data_dir, img_left, img_top, img_height, str1_left, str1_top, str1, str2_left, str2_top + c, + imgf, + self.data_dir, + img_left, + img_top, + img_height, + str1_left, + str1_top, + str1, + str2_left, + str2_top, ) imgf = self.flow_g2v_rate_fit @@ -1365,7 +1476,17 @@ def report_flow_pv_g2(self, top=720, new_page=False): str1_left, str1_top, str1 = 210, top + 300, "" str2_left, str2_top = 350, top - 10 + 50 add_image_string( - c, imgf, self.data_dir, img_left, img_top, img_height, str1_left, str1_top, str1, str2_left, str2_top + c, + imgf, + self.data_dir, + img_left, + img_top, + img_height, + str1_left, + str1_top, + str1, + str2_left, + str2_top, ) top = top - 340 @@ -1376,7 +1497,17 @@ def report_flow_pv_g2(self, top=720, new_page=False): str1_left, str1_top, str1 = 210, top + 300, "XPCS Parallel Flow" str2_left, str2_top = 180, top - 10 add_image_string( - c, imgf, self.data_dir, img_left, img_top, img_height, str1_left, str1_top, str1, str2_left, str2_top + c, + imgf, + self.data_dir, + img_left, + img_top, + img_height, + str1_left, + str1_top, + str1, + str2_left, + str2_top, ) imgf = self.flow_g2p_rate_fit @@ -1385,7 +1516,17 @@ def report_flow_pv_g2(self, top=720, new_page=False): str1_left, str1_top, str1 = 210, top + 300, "" str2_left, str2_top = 350, top - 10 + 50 add_image_string( - c, imgf, self.data_dir, img_left, img_top, img_height, str1_left, str1_top, str1, str2_left, str2_top + c, 
+ imgf, + self.data_dir, + img_left, + img_top, + img_height, + str1_left, + str1_top, + str1, + str2_left, + str2_top, ) if new_page: @@ -1405,7 +1546,9 @@ def report_flow_pv_two_time(self, top=720, new_page=False): ds = 20 self.sub_title_num += 1 - c.drawString(10, top, "%s. Flow One &Two Time Comparison" % self.sub_title_num) # add title + c.drawString( + 10, top, "%s. Flow One &Two Time Comparison" % self.sub_title_num + ) # add title c.setFont("Helvetica", 14) top1 = top @@ -1440,7 +1583,17 @@ def report_flow_pv_two_time(self, top=720, new_page=False): str1_left, str1_top, str1 = 210, top + 300, "XPCS Vertical Flow by two-time" str2_left, str2_top = 180, top - 10 add_image_string( - c, imgf, self.data_dir, img_left, img_top, img_height, str1_left, str1_top, str1, str2_left, str2_top + c, + imgf, + self.data_dir, + img_left, + img_top, + img_height, + str1_left, + str1_top, + str1, + str2_left, + str2_top, ) imgf = self.flow_g2bp_rate_fit @@ -1449,7 +1602,17 @@ def report_flow_pv_two_time(self, top=720, new_page=False): str1_left, str1_top, str1 = 210, top + 300, "" str2_left, str2_top = 350, top - 10 + 50 add_image_string( - c, imgf, self.data_dir, img_left, img_top, img_height, str1_left, str1_top, str1, str2_left, str2_top + c, + imgf, + self.data_dir, + img_left, + img_top, + img_height, + str1_left, + str1_top, + str1, + str2_left, + str2_top, ) top = top - 340 @@ -1461,7 +1624,17 @@ def report_flow_pv_two_time(self, top=720, new_page=False): str1_left, str1_top, str1 = 210, top + 300, "XPCS Parallel Flow by two-time" str2_left, str2_top = 180, top - 10 add_image_string( - c, imgf, self.data_dir, img_left, img_top, img_height, str1_left, str1_top, str1, str2_left, str2_top + c, + imgf, + self.data_dir, + img_left, + img_top, + img_height, + str1_left, + str1_top, + str1, + str2_left, + str2_top, ) imgf = self.flow_g2bv_rate_fit @@ -1470,7 +1643,17 @@ def report_flow_pv_two_time(self, top=720, new_page=False): str1_left, str1_top, str1 = 210, top + 300, "" 
str2_left, str2_top = 350, top - 10 + 50 add_image_string( - c, imgf, self.data_dir, img_left, img_top, img_height, str1_left, str1_top, str1, str2_left, str2_top + c, + imgf, + self.data_dir, + img_left, + img_top, + img_height, + str1_left, + str1_top, + str1, + str2_left, + str2_top, ) if new_page: @@ -1490,7 +1673,9 @@ def report_xsvs(self, top=720, new_page=False): ds = 20 self.sub_title_num += 1 - c.drawString(10, top, "%s. Visibility Analysis" % self.sub_title_num) # add title + c.drawString( + 10, top, "%s. Visibility Analysis" % self.sub_title_num + ) # add title c.setFont("Helvetica", 14) top = top - 330 # add xsvs fit @@ -1504,7 +1689,7 @@ def report_xsvs(self, top=720, new_page=False): img_height=300, str1_left=210, str1_top=top + 300, - str1="XSVS_Fit_by_Negtive_Binomal Function", + str1="XSVS_Fit_by_Negative_Binomal Function", str2_left=180, str2_top=top - 10, ) @@ -1538,7 +1723,7 @@ def report_xsvs(self, top=720, new_page=False): c.drawImage(image, 100, top, width=height / ratio, height=height, mask=None) c.setFont("Helvetica", 16) c.setFillColor(blue) - c.drawString(210, top + 300, "XSVS_Fit_by_Negtive_Binomal Function") + c.drawString(210, top + 300, "XSVS_Fit_by_Negative_Binomal Function") c.setFont("Helvetica", 12) c.setFillColor(red) c.drawString(180, top - 10, "filename: %s" % imgf) @@ -1580,7 +1765,9 @@ def done(self): print("*" * 40) -def create_multi_pdf_reports_for_uids(uids, g2, data_dir, report_type="saxs", append_name=""): +def create_multi_pdf_reports_for_uids( + uids, g2, data_dir, report_type="saxs", append_name="" +): """Aug 16, YG@CHX-NSLS-II Create multi pdf reports for each uid in uids uids: a list of uids to be reported @@ -1615,7 +1802,9 @@ def create_multi_pdf_reports_for_uids(uids, g2, data_dir, report_type="saxs", ap c.done() -def create_one_pdf_reports_for_uids(uids, g2, data_dir, filename="all_in_one", report_type="saxs"): +def create_one_pdf_reports_for_uids( + uids, g2, data_dir, filename="all_in_one", report_type="saxs" 
+): """Aug 16, YG@CHX-NSLS-II Create one pdf reports for each uid in uids uids: a list of uids to be reported @@ -1623,7 +1812,9 @@ def create_one_pdf_reports_for_uids(uids, g2, data_dir, filename="all_in_one", r data_dir: Save pdf report in data dir """ - c = create_pdf_report(data_dir, uid=filename, out_dir=data_dir, load=False, report_type=report_type) + c = create_pdf_report( + data_dir, uid=filename, out_dir=data_dir, load=False, report_type=report_type + ) page = 1 for key in list(g2.keys()): @@ -1652,9 +1843,9 @@ def save_res_h5(full_uid, data_dir, save_two_time=False): YG. Nov 10, 2016 save the results to a h5 file will save meta data/avg_img/mask/roi (ring_mask or box_mask)/ - will aslo save multi-tau calculated one-time correlation function g2/taus + will also save multi-tau calculated one-time correlation function g2/taus will also save two-time derived one-time correlation function /g2b/taus2 - if save_two_time if True, will save two-time correaltion function + if save_two_time if True, will save two-time correlation function """ with h5py.File(data_dir + "%s.h5" % full_uid, "w") as hf: # write meta data @@ -1688,9 +1879,9 @@ def load_res_h5(full_uid, data_dir): """YG. 
Nov 10, 2016 load results from a h5 file will load meta data/avg_img/mask/roi (ring_mask or box_mask)/ - will aslo load multi-tau calculated one-time correlation function g2/taus + will also load multi-tau calculated one-time correlation function g2/taus will also load two-time derived one-time correlation function /g2b/taus2 - if save_two_time if True, will load two-time correaltion function + if save_two_time if True, will load two-time correlation function """ with h5py.File(data_dir + "%s.h5" % full_uid, "r") as hf: @@ -1709,7 +1900,17 @@ def load_res_h5(full_uid, data_dir): g12b_h5 = np.array(hf.get("g12b")) if "g12b" in hf: - return meta_data, avg_h5, mask_h5, roi_h5, g2_h5, taus_h5, g2b_h5, taus2_h5, g12b + return ( + meta_data, + avg_h5, + mask_h5, + roi_h5, + g2_h5, + taus_h5, + g2b_h5, + taus2_h5, + g12b, + ) else: return meta_data, avg_h5, mask_h5, roi_h5, g2_h5, taus_h5, g2b_h5, taus2_h5 @@ -1865,7 +2066,9 @@ def recursively_save_dict_contents_to_group(h5file, path, dic): # print( 'here' ) h5file[path + key] = item if not h5file[path + key].value == item: - raise ValueError("The data representation in the HDF5 file does not match the original dict.") + raise ValueError( + "The data representation in the HDF5 file does not match the original dict." + ) # save numpy arrays elif isinstance(item, np.ndarray): try: @@ -1874,7 +2077,9 @@ def recursively_save_dict_contents_to_group(h5file, path, dic): item = np.array(item).astype("|S9") h5file[path + key] = item if not np.array_equal(h5file[path + key].value, item): - raise ValueError("The data representation in the HDF5 file does not match the original dict.") + raise ValueError( + "The data representation in the HDF5 file does not match the original dict." 
+ ) # save dictionaries elif isinstance(item, dict): recursively_save_dict_contents_to_group(h5file, path + key + "/", item) @@ -1891,7 +2096,9 @@ def recursively_load_dict_contents_from_group(h5file, path): if isinstance(item, h5py._hl.dataset.Dataset): ans[key] = item.value elif isinstance(item, h5py._hl.group.Group): - ans[key] = recursively_load_dict_contents_from_group(h5file, path + key + "/") + ans[key] = recursively_load_dict_contents_from_group( + h5file, path + key + "/" + ) return ans @@ -1926,11 +2133,19 @@ def export_xpcs_results_to_h5(filename, export_dir, export_dict): elif key in dict_nest: # print(key) try: - recursively_save_dict_contents_to_group(hf, "/%s/" % key, export_dict[key]) + recursively_save_dict_contents_to_group( + hf, "/%s/" % key, export_dict[key] + ) except: print("Can't export the key: %s in this dataset." % key) - elif key in ["g2_fit_paras", "g2b_fit_paras", "spec_km_pds", "spec_pds", "qr_1d_pds"]: + elif key in [ + "g2_fit_paras", + "g2b_fit_paras", + "spec_km_pds", + "spec_pds", + "qr_1d_pds", + ]: try: export_dict[key].to_hdf( fout, @@ -1945,17 +2160,28 @@ def export_xpcs_results_to_h5(filename, export_dir, export_dict): data.set_fill_value = np.nan if flag: for key in list(export_dict.keys()): - if key in ["g2_fit_paras", "g2b_fit_paras", "spec_km_pds", "spec_pds", "qr_1d_pds"]: + if key in [ + "g2_fit_paras", + "g2b_fit_paras", + "spec_km_pds", + "spec_pds", + "qr_1d_pds", + ]: export_dict[key].to_hdf( fout, key=key, mode="a", ) - print("The xpcs analysis results are exported to %s with filename as %s" % (export_dir, filename)) + print( + "The xpcs analysis results are exported to %s with filename as %s" + % (export_dir, filename) + ) -def extract_xpcs_results_from_h5_debug(filename, import_dir, onekey=None, exclude_keys=None): +def extract_xpcs_results_from_h5_debug( + filename, import_dir, onekey=None, exclude_keys=None +): """ YG. 
Dec 22, 2016 extract data from a h5 file @@ -1984,8 +2210,16 @@ def extract_xpcs_results_from_h5_debug(filename, import_dir, onekey=None, exclud for key in list(hf.keys()): if key not in exclude_keys: if key in dicts: - extract_dict[key] = recursively_load_dict_contents_from_group(hf, "/" + key + "/") - elif key in ["g2_fit_paras", "g2b_fit_paras", "spec_km_pds", "spec_pds", "qr_1d_pds"]: + extract_dict[key] = recursively_load_dict_contents_from_group( + hf, "/" + key + "/" + ) + elif key in [ + "g2_fit_paras", + "g2b_fit_paras", + "spec_km_pds", + "spec_pds", + "qr_1d_pds", + ]: pds_type_keys.append(key) else: extract_dict[key] = np.array(hf.get(key)) @@ -1998,7 +2232,13 @@ def extract_xpcs_results_from_h5_debug(filename, import_dir, onekey=None, exclud md = hf.get("md") for key in list(md.attrs): extract_dict["md"][key] = md.attrs[key] - elif onekey in ["g2_fit_paras", "g2b_fit_paras", "spec_km_pds", "spec_pds", "qr_1d_pds"]: + elif onekey in [ + "g2_fit_paras", + "g2b_fit_paras", + "spec_km_pds", + "spec_pds", + "qr_1d_pds", + ]: extract_dict[onekey] = pds.read_hdf(fp, key=onekey) else: try: @@ -2038,10 +2278,15 @@ def export_xpcs_results_to_h5_old(filename, export_dir, export_dict): k1 = export_dict[key] v1 = hf.create_dataset(key, (1,), dtype="i") for k2 in k1.keys(): - v2 = hf.create_dataset(k1, (1,), dtype="i") - elif key in ["g2_fit_paras", "g2b_fit_paras", "spec_km_pds", "spec_pds", "qr_1d_pds"]: + elif key in [ + "g2_fit_paras", + "g2b_fit_paras", + "spec_km_pds", + "spec_pds", + "qr_1d_pds", + ]: export_dict[key].to_hdf( fout, key=key, @@ -2049,10 +2294,15 @@ def export_xpcs_results_to_h5_old(filename, export_dir, export_dict): ) else: data = hf.create_dataset(key, data=export_dict[key]) - print("The xpcs analysis results are exported to %s with filename as %s" % (export_dir, filename)) + print( + "The xpcs analysis results are exported to %s with filename as %s" + % (export_dir, filename) + ) -def extract_xpcs_results_from_h5(filename, import_dir, 
onekey=None, exclude_keys=None, two_time_qindex=None): +def extract_xpcs_results_from_h5( + filename, import_dir, onekey=None, exclude_keys=None, two_time_qindex=None +): """ YG. Dec 22, 2016 extract data from a h5 file @@ -2064,7 +2314,6 @@ def extract_xpcs_results_from_h5(filename, import_dir, onekey=None, exclude_keys extact_dict: dict, with keys as md, g2, g4 et.al. """ - import numpy as np import pandas as pds extract_dict = {} @@ -2089,7 +2338,13 @@ def extract_xpcs_results_from_h5(filename, import_dir, onekey=None, exclude_keys else: extract_dict[key][key_] = md.attrs[key_] - elif key in ["g2_fit_paras", "g2b_fit_paras", "spec_km_pds", "spec_pds", "qr_1d_pds"]: + elif key in [ + "g2_fit_paras", + "g2b_fit_paras", + "spec_km_pds", + "spec_pds", + "qr_1d_pds", + ]: pds_type_keys.append(key) else: if key == "g12b": @@ -2098,7 +2353,9 @@ def extract_xpcs_results_from_h5(filename, import_dir, onekey=None, exclude_keys else: extract_dict[key] = hf.get(key)[:] else: - extract_dict[key] = hf.get(key)[:] # np.array( hf.get( key )) + extract_dict[key] = hf.get(key)[ + : + ] # np.array( hf.get( key )) for key in pds_type_keys: if key not in exclude_keys: @@ -2109,7 +2366,13 @@ def extract_xpcs_results_from_h5(filename, import_dir, onekey=None, exclude_keys md = hf.get("md") for key in list(md.attrs): extract_dict["md"][key] = md.attrs[key] - elif onekey in ["g2_fit_paras", "g2b_fit_paras", "spec_km_pds", "spec_pds", "qr_1d_pds"]: + elif onekey in [ + "g2_fit_paras", + "g2b_fit_paras", + "spec_km_pds", + "spec_pds", + "qr_1d_pds", + ]: extract_dict[onekey] = pds.read_hdf(fp, key=onekey) else: try: @@ -2158,7 +2421,9 @@ def read_contrast_from_multi_h5( ) times_xsvs[i] = t["times_xsvs"][0] contri = extract_xpcs_results_from_h5( - filename="%s_Res.h5" % uid, import_dir=path + uid + "/", onekey="contrast_factorL" + filename="%s_Res.h5" % uid, + import_dir=path + uid + "/", + onekey="contrast_factorL", ) if i == 0: contr = np.zeros([N, contri["contrast_factorL"].shape[0]]) 
diff --git a/pyCHX/DEVs.py b/pyCHX/DEVs.py index 6e89cda..35aa061 100644 --- a/pyCHX/DEVs.py +++ b/pyCHX/DEVs.py @@ -1,7 +1,6 @@ # simple brute force multitau # from pyCHX.chx_generic_functions import average_array_withNan import numpy as np -import skbeam.core.roi as roi from numpy.fft import fft, ifft from tqdm import tqdm @@ -18,7 +17,7 @@ def fit_one_peak_curve(x, y, fit_range): fwhm: float, full width at half max intensity of the peak, 2*sigma fwhm_std:float, error bar of the full width at half max intensity of the peak xf: the x in the fit - out: the fitting class resutled from lmfit + out: the fitting class resulted from lmfit """ from lmfit.models import LinearModel, LorentzianModel @@ -43,7 +42,16 @@ def fit_one_peak_curve(x, y, fit_range): return cen, cen_std, wid, wid_std, xf, out -def plot_xy_with_fit(x, y, xf, out, xlim=[1e-3, 0.01], xlabel="q (" r"$\AA^{-1}$)", ylabel="I(q)", filename=None): +def plot_xy_with_fit( + x, + y, + xf, + out, + xlim=[1e-3, 0.01], + xlabel="q (" r"$\AA^{-1}$)", + ylabel="I(q)", + filename=None, +): """YG Dev@Aug 10, 2019 to plot x,y with fit, currently this code is dedicated to plot q-Iq with fit and show the fittign parameter, peak pos, peak wid""" @@ -155,7 +163,7 @@ def get_oneQ_g2_fft(time_inten_oneQ, axis=0): Input: time_inten_oneQ: 2d-array, shape=[time, pixel number in the ROI], a time dependent intensity for a list of pixels - ( the equivilent pixels belongs to one Q ) + ( the equivalent pixels belongs to one Q ) Return: G/(P*F) """ @@ -193,7 +201,7 @@ def get_g2_PF(time_inten): def auto_correlation_fft_padding_zeros(a, axis=-1): - """Y.G. Dev@CHX, 2018/10/15 Do autocorelation of ND array by fft + """Y.G. 
Dev@CHX, 2018/10/15 Do autocorelation of AND array by fft Math: Based on auto_cor(arr) = ifft( fft( arr ) * fft(arr[::-1]) ) In numpy form @@ -218,7 +226,12 @@ def auto_correlation_fft_padding_zeros(a, axis=-1): N = M[axis] # print(M, N, 2*N-1) cor = np.real( - ifft(fft(a, n=N * 2 - 1, axis=axis) * np.conjugate(fft(a, n=N * 2 - 1, axis=axis)), n=N * 2 - 1, axis=axis) + ifft( + fft(a, n=N * 2 - 1, axis=axis) + * np.conjugate(fft(a, n=N * 2 - 1, axis=axis)), + n=N * 2 - 1, + axis=axis, + ) ) if len(M) == 1: @@ -233,7 +246,7 @@ def auto_correlation_fft_padding_zeros(a, axis=-1): def auto_correlation_fft(a, axis=-1): - """Y.G. Dev@CHX, 2018/10/15 Do autocorelation of ND array by fft + """Y.G. Dev@CHX, 2018/10/15 Do autocorelation of AND array by fft Math: Based on auto_cor(arr) = ifft( fft( arr ) * fft(arr[::-1]) ) In numpy form @@ -294,7 +307,7 @@ def multitau(Ipix, bind, lvl=12, nobuf=8): / noperbin ) G2[j, :] = np.bincount(bind, np.mean(dII[j:, :] * dII[:-j, :], axis=0)) / t - for l in tqdm(np.arange(1, lvl), desc="Calcuate g2..."): + for l in tqdm(np.arange(1, lvl), desc="Calculate g2..."): nn = dII.shape[0] // 2 * 2 # make it even dII = (dII[0:nn:2, :] + dII[1:nn:2, :]) / 2.0 # sum in pairs nn = nn // 2 @@ -308,7 +321,9 @@ def multitau(Ipix, bind, lvl=12, nobuf=8): * np.bincount(bind, np.mean(dII[:-j, :], axis=0)) / noperbin ) - G2[ind, :] = np.bincount(bind, np.mean(dII[j:, :] * dII[:-j, :], axis=0)) / t + G2[ind, :] = ( + np.bincount(bind, np.mean(dII[j:, :] * dII[:-j, :], axis=0)) / t + ) # print(ind) # print(time.time()-t0) return (tt[: ind + 1], G2[: ind + 1, :]) @@ -316,10 +331,10 @@ def multitau(Ipix, bind, lvl=12, nobuf=8): def average_array_withNan(array, axis=0, mask=None): """YG. 
Jan 23, 2018 - Average array invovling np.nan along axis + Average array involving np.nan along axis Input: - array: ND array, actually should be oneD or twoD at this stage..TODOLIST for ND + array: AND array, actually should be oneD or twoD at this stage..TODOLIST for AND axis: the average axis mask: bool, same shape as array, if None, will mask all the nan values Output: @@ -340,7 +355,9 @@ def average_array_withNan(array, axis=0, mask=None): return sums / cts -def autocor_for_pix_time(pix_time_data, dly_dict, pixel_norm=None, frame_norm=None, multi_tau_method=True): +def autocor_for_pix_time( + pix_time_data, dly_dict, pixel_norm=None, frame_norm=None, multi_tau_method=True +): """YG Feb 20, 2018@CHX Do correlation for pixel_time type data with tau as defined as dly Input: @@ -360,7 +377,7 @@ def autocor_for_pix_time(pix_time_data, dly_dict, pixel_norm=None, frame_norm=No Gp = np.zeros([Ntau, Np]) Gf = np.zeros([Ntau, Np]) # mask_pix = np.isnan(pix_time_data) - # for tau_ind, tau in tqdm( enumerate(dly), desc= 'Calcuate g2...' ): + # for tau_ind, tau in tqdm( enumerate(dly), desc= 'Calculate g2...' ): tau_ind = 0 # if multi_tau_method: pix_time_datac = pix_time_data.copy() @@ -370,14 +387,18 @@ def autocor_for_pix_time(pix_time_data, dly_dict, pixel_norm=None, frame_norm=No if frame_norm is not None: pix_time_datac /= frame_norm - for tau_lev, tau_key in tqdm(enumerate(list(dly_dict.keys())), desc="Calcuate g2..."): + for tau_lev, tau_key in tqdm( + enumerate(list(dly_dict.keys())), desc="Calculate g2..." 
+ ): # print(tau_key) taus = dly_dict[tau_key] if multi_tau_method: if tau_lev > 0: nobuf = len(dly_dict[1]) nn = pix_time_datac.shape[0] // 2 * 2 # make it even - pix_time_datac = (pix_time_datac[0:nn:2, :] + pix_time_datac[1:nn:2, :]) / 2.0 # sum in pairs + pix_time_datac = ( + pix_time_datac[0:nn:2, :] + pix_time_datac[1:nn:2, :] + ) / 2.0 # sum in pairs nn = nn // 2 if nn < nobuf: break @@ -437,7 +458,6 @@ def autocor_xytframe(self, n): ###################For Fit import matplotlib.pyplot as plt -import numpy as np from scipy.optimize import leastsq # duplicate my curfit function from yorick, except use sigma and not w @@ -452,7 +472,9 @@ def curfit(x, y, a, sigy=None, function_name=None, adj=None): function_name = funct # print( a, adj, a[adj] ) # print(x,y,a) - afit, cv, idt, m, ie = leastsq(_residuals, a[adj], args=(x, y, sigy, a, adj, function_name), full_output=True) + afit, cv, idt, m, ie = leastsq( + _residuals, a[adj], args=(x, y, sigy, a, adj, function_name), full_output=True + ) a[adj] = afit realcv = np.identity(afit.size) realcv[np.ix_(adj, adj)] = cv @@ -484,12 +506,15 @@ def fitpr(chisq, a, sigmaa, title=None, lbl=None): lbl = [] for i in xrange(a.size): lbl.append("A%(#)02d" % {"#": i}) - # print resuls of a fit. + # print results of a fit. 
if title != None: print(title) print(" chisq=%(c).4f" % {"c": chisq}) for i in range(a.size): - print(" %(lbl)8s =%(m)10.4f +/- %(s).4f" % {"lbl": lbl[i], "m": a[i], "s": sigmaa[i]}) + print( + " %(lbl)8s =%(m)10.4f +/- %(s).4f" + % {"lbl": lbl[i], "m": a[i], "s": sigmaa[i]} + ) # easy plot for fit @@ -512,7 +537,9 @@ def Gaussian(x, p): """ xo, amplitude, sigma, offset = p - g = offset + amplitude * 1.0 / (sigma * np.sqrt(2 * np.pi)) * np.exp(-1 / 2.0 * (x - xo) ** 2 / sigma**2) + g = offset + amplitude * 1.0 / (sigma * np.sqrt(2 * np.pi)) * np.exp( + -1 / 2.0 * (x - xo) ** 2 / sigma**2 + ) return g @@ -551,7 +578,8 @@ def gen_elps_sectors(a, b, r_min, r_n, th_n, c_x, c_y, th_min=0, th_max=360): th_list = np.linspace(th_min, th_max, th_n + 1) r_list = np.linspace(r_min, 1, r_n + 1) regions_list = [ - [[np.array([], dtype=np.int_), np.array([], dtype=np.int_)] for _ in range(r_n)] for _ in range(th_n) + [[np.array([], dtype=np.int_), np.array([], dtype=np.int_)] for _ in range(r_n)] + for _ in range(th_n) ] w = int(np.ceil(a * 2)) h = int(np.ceil(b * 2)) @@ -565,12 +593,18 @@ def gen_elps_sectors(a, b, r_min, r_n, th_n, c_x, c_y, th_min=0, th_max=360): cur_r = np.sqrt(cur_x**2 + cur_y**2) cur_elps_r = elps_r(a, b, cur_theta) cur_r_list = r_list * cur_elps_r - cur_theta = np.rad2deg(cur_theta) # Convert to degrees to compare with th_list + cur_theta = np.rad2deg( + cur_theta + ) # Convert to degrees to compare with th_list r_ind = place_in_interval(cur_r, cur_r_list) th_ind = place_in_interval(cur_theta, th_list) if (r_ind != -1) and (th_ind != -1): - regions_list[th_ind][r_ind][0] = np.append(regions_list[th_ind][r_ind][0], ii + x_offset) - regions_list[th_ind][r_ind][1] = np.append(regions_list[th_ind][r_ind][1], jj + y_offset) + regions_list[th_ind][r_ind][0] = np.append( + regions_list[th_ind][r_ind][0], ii + x_offset + ) + regions_list[th_ind][r_ind][1] = np.append( + regions_list[th_ind][r_ind][1], jj + y_offset + ) sectors = [] for th_reg_list in 
regions_list: for sector in th_reg_list: diff --git a/pyCHX/DataGonio.py b/pyCHX/DataGonio.py index b8603ef..c9a49c1 100644 --- a/pyCHX/DataGonio.py +++ b/pyCHX/DataGonio.py @@ -1,24 +1,29 @@ # import sys -import os -import re # Regular expressions -import sys -import matplotlib as mpl import numpy as np # from scipy.optimize import leastsq # import scipy.special import PIL # Python Image Library (for opening PNG, etc.) -import pylab as plt -import skbeam.core.correlation as corr -import skbeam.core.roi as roi import skbeam.core.utils as utils -from skbeam.core.accumulators.binned_statistic import BinnedStatistic1D, BinnedStatistic2D +from skbeam.core.accumulators.binned_statistic import ( + BinnedStatistic1D, + BinnedStatistic2D, +) from pyCHX.chx_generic_functions import average_array_withNan -def convert_Qmap(img, qx_map, qy_map=None, bins=None, rangeq=None, origin=None, mask=None, statistic="mean"): +def convert_Qmap( + img, + qx_map, + qy_map=None, + bins=None, + rangeq=None, + origin=None, + mask=None, + statistic="mean", +): """Y.G. 
Nov 3@CHX Convert a scattering image to a qmap by giving qx_map and qy_map Return converted qmap, x-coordinates and y-coordinates @@ -33,9 +38,18 @@ def convert_Qmap(img, qx_map, qy_map=None, bins=None, rangeq=None, origin=None, bins = qx_map.shape b2d = BinnedStatistic2D( - qx_map.ravel(), qy_map.ravel(), statistic=statistic, bins=bins, mask=mask.ravel(), range=rangeq + qx_map.ravel(), + qy_map.ravel(), + statistic=statistic, + bins=bins, + mask=mask.ravel(), + range=rangeq, + ) + remesh_data, xbins, ybins = ( + b2d(img.ravel()), + b2d.bin_centers[0], + b2d.bin_centers[1], ) - remesh_data, xbins, ybins = b2d(img.ravel()), b2d.bin_centers[0], b2d.bin_centers[1] else: if rangeq is None: @@ -58,7 +72,16 @@ def convert_Qmap(img, qx_map, qy_map=None, bins=None, rangeq=None, origin=None, return remesh_data, xbins, ybins -def qphiavg(image, q_map=None, phi_map=None, mask=None, bins=None, origin=None, range=None, statistic="mean"): +def qphiavg( + image, + q_map=None, + phi_map=None, + mask=None, + bins=None, + origin=None, + range=None, + statistic="mean", +): """Octo 20, 2017 Yugang According to Julien's Suggestion Get from https://github.com/CFN-softbio/SciStreams/blob/master/SciStreams/processing/qphiavg.py With a small revision --> return three array rather than dict @@ -93,7 +116,12 @@ def qphiavg(image, q_map=None, phi_map=None, mask=None, bins=None, origin=None, mask = mask.reshape(-1) rphibinstat = BinnedStatistic2D( - q_map.reshape(-1), phi_map.reshape(-1), statistic=statistic, bins=bins, mask=mask, range=range + q_map.reshape(-1), + phi_map.reshape(-1), + statistic=statistic, + bins=bins, + mask=mask, + range=range, ) sqphi = rphibinstat(image.ravel()) @@ -119,7 +147,9 @@ def get_QPhiMap(img_shape, center): return q_map, phi_map -def get_img_qphimap(img, q_map, phi_map, mask, bins, center, qang_range=None, statistic="mean"): +def get_img_qphimap( + img, q_map, phi_map, mask, bins, center, qang_range=None, statistic="mean" +): """Y.G., Dev Nov 10, 2018 Get 
phi_map by giving image e.g., q_map, phi_map = get_QPhiMap( mask.shape, center[::-1]) @@ -174,7 +204,12 @@ def convert_Qmap_old(img, qx_map, qy_map=None, bins=None, rangeq=None): bins = qx_map.shape remesh_data, xbins, ybins = np.histogram2d( - qx_map.ravel(), qy_map.ravel(), bins=bins, range=rangeq, normed=False, weights=img.ravel() + qx_map.ravel(), + qy_map.ravel(), + bins=bins, + range=rangeq, + normed=False, + weights=img.ravel(), ) else: @@ -499,7 +534,9 @@ def _generate_qxyz_maps(self): alpha_f = np.arctan2(Y * c * np.cos(theta_f), 1) # radians self.qx_map_data = self.get_k() * np.sin(theta_f) * np.cos(alpha_f) - self.qy_map_data = self.get_k() * (np.cos(theta_f) * np.cos(alpha_f) - 1) # TODO: Check sign + self.qy_map_data = self.get_k() * ( + np.cos(theta_f) * np.cos(alpha_f) - 1 + ) # TODO: Check sign self.qz_map_data = -1.0 * self.get_k() * np.sin(alpha_f) self.qr_map_data = np.sign(self.qx_map_data) * np.sqrt( @@ -514,7 +551,7 @@ def _generate_qxyz_maps(self): ################################################################################ class CalibrationGonio(Calibration): """ - The geometric claculations used here are described: + The geometric calculations used here are described: http://gisaxs.com/index.php/Geometry:WAXS_3D """ @@ -523,7 +560,15 @@ class CalibrationGonio(Calibration): ######################################## def set_angles( - self, det_phi_g=0.0, det_theta_g=0.0, sam_phi=0, sam_chi=0, sam_theta=0, offset_x=0, offset_y=0, offset_z=0 + self, + det_phi_g=0.0, + det_theta_g=0.0, + sam_phi=0, + sam_chi=0, + sam_theta=0, + offset_x=0, + offset_y=0, + offset_z=0, ): """ YG. 
Add sample rotation angles that convert qmap from lab frame to sample frame @@ -551,7 +596,7 @@ def set_angles( self.sam_chi = sam_chi self.sam_theta = sam_theta - def rotation_matix(self, sam_phi, sam_theta, sam_chi, degrees=True): + def rotation_matrix(self, sam_phi, sam_theta, sam_chi, degrees=True): """ sam_phi, rotate along lab-frame x, CHX phi sam_chi, rotate along lab-frame z, CHX chi @@ -559,23 +604,45 @@ def rotation_matix(self, sam_phi, sam_theta, sam_chi, degrees=True): """ if degrees: - sam_phi, sam_chi, sam_theta = np.radians(sam_phi), np.radians(sam_chi), np.radians(sam_theta) + sam_phi, sam_chi, sam_theta = ( + np.radians(sam_phi), + np.radians(sam_chi), + np.radians(sam_theta), + ) - Rx = np.array([[1, 0, 0], [0, np.cos(sam_phi), np.sin(sam_phi)], [0, -np.sin(sam_phi), np.cos(sam_phi)]]) + Rx = np.array( + [ + [1, 0, 0], + [0, np.cos(sam_phi), np.sin(sam_phi)], + [0, -np.sin(sam_phi), np.cos(sam_phi)], + ] + ) - Rz = np.array([[np.cos(sam_chi), np.sin(sam_chi), 0], [-np.sin(sam_chi), np.cos(sam_chi), 0], [0, 0, 1]]) + Rz = np.array( + [ + [np.cos(sam_chi), np.sin(sam_chi), 0], + [-np.sin(sam_chi), np.cos(sam_chi), 0], + [0, 0, 1], + ] + ) Ry = np.array( - [[np.cos(sam_theta), 0, np.sin(sam_theta)], [0, 1, 0], [-np.sin(sam_theta), 0, np.cos(sam_theta)]] + [ + [np.cos(sam_theta), 0, np.sin(sam_theta)], + [0, 1, 0], + [-np.sin(sam_theta), 0, np.cos(sam_theta)], + ] ) Rxy = np.dot(Rx, Ry) return np.dot(Rxy, Rz) - def _generate_qxyz_map_SF_from_Lab(self, qx, qy, qz, sam_phi, sam_theta, sam_chi, degrees=True): + def _generate_qxyz_map_SF_from_Lab( + self, qx, qy, qz, sam_phi, sam_theta, sam_chi, degrees=True + ): """ Convert qmap from Lab frame to sample frame """ - self.Rot = self.rotation_matix(sam_phi, sam_theta, sam_chi, degrees=degrees) + self.Rot = self.rotation_matrix(sam_phi, sam_theta, sam_chi, degrees=degrees) qsx, qsy, qsz = np.dot(self.Rot, [np.ravel(qx), np.ravel(qy), np.ravel(qz)]) return qsx.reshape(qx.shape), qsy.reshape(qy.shape), 
qsz.reshape(qz.shape) @@ -584,19 +651,25 @@ def _generate_qxyz_maps_samFrame(self, degrees=True): Get lab frame qmap """ self._generate_qxyz_maps() - self.qx_map_lab_data, self.qy_map_lab_data, self.qz_map_lab_data = self._generate_qxyz_map_SF_from_Lab( - self.qx_map_data, - self.qy_map_data, - self.qz_map_data, - self.sam_phi, - self.sam_theta, - self.sam_chi, - degrees=degrees, + self.qx_map_lab_data, self.qy_map_lab_data, self.qz_map_lab_data = ( + self._generate_qxyz_map_SF_from_Lab( + self.qx_map_data, + self.qy_map_data, + self.qz_map_data, + self.sam_phi, + self.sam_theta, + self.sam_chi, + degrees=degrees, + ) + ) + self.qr_map_lab_data = np.sqrt( + np.square(self.qx_map_lab_data) + np.square(self.qy_map_lab_data) ) - self.qr_map_lab_data = np.sqrt(np.square(self.qx_map_lab_data) + np.square(self.qy_map_lab_data)) self.q_map_lab_data = np.sqrt( - np.square(self.qx_map_lab_data) + np.square(self.qy_map_lab_data) + np.square(self.qz_map_lab_data) + np.square(self.qx_map_lab_data) + + np.square(self.qy_map_lab_data) + + np.square(self.qz_map_lab_data) ) def get_ratioDw(self): @@ -618,9 +691,9 @@ def angle_map(self): return self.angle_map_data - def _generate_qxyz_maps_no_offest(self): + def _generate_qxyz_maps_no_offset(self): """ - The geometric claculations used here are described: + The geometric calculations used here are described: http://gisaxs.com/index.php/Geometry:WAXS_3D """ @@ -639,10 +712,13 @@ def _generate_qxyz_maps_no_offest(self): k_over_Dprime = self.get_k() / Dprime qx_c = k_over_Dprime * ( - X_c * np.cos(phi_g) - np.sin(phi_g) * (d * np.cos(theta_g) - Y_c * np.sin(theta_g)) + X_c * np.cos(phi_g) + - np.sin(phi_g) * (d * np.cos(theta_g) - Y_c * np.sin(theta_g)) ) qy_c = k_over_Dprime * ( - X_c * np.sin(phi_g) + np.cos(phi_g) * (d * np.cos(theta_g) - Y_c * np.sin(theta_g)) - Dprime + X_c * np.sin(phi_g) + + np.cos(phi_g) * (d * np.cos(theta_g) - Y_c * np.sin(theta_g)) + - Dprime ) qz_c = -1 * k_over_Dprime * (d * np.sin(theta_g) + Y_c * 
np.cos(theta_g)) @@ -664,7 +740,9 @@ def _generate_qxyz_maps_no_offest(self): alpha_f = np.arctan2(Y * c * np.cos(theta_f), 1) # radians self.qx_map_data = self.get_k() * np.sin(theta_f) * np.cos(alpha_f) - self.qy_map_data = self.get_k() * (np.cos(theta_f) * np.cos(alpha_f) - 1) # TODO: Check sign + self.qy_map_data = self.get_k() * ( + np.cos(theta_f) * np.cos(alpha_f) - 1 + ) # TODO: Check sign self.qz_map_data = -1.0 * self.get_k() * np.sin(alpha_f) self.qr_map_data = np.sign(self.qx_map_data) * np.sqrt( @@ -678,7 +756,7 @@ def _generate_qxyz_maps_no_offest(self): def _generate_qxyz_maps(self): """ - The geometric claculations used here are described: + The geometric calculations used here are described: http://gisaxs.com/index.php/Geometry:WAXS_3D YG add offset corrections at Sep 21, 2017 @@ -722,8 +800,14 @@ def _generate_qxyz_maps(self): k_over_Dprime = self.get_k() / Dprime qx_c = k_over_Dprime * (X_c * np.cos(phi_g) - np.sin(phi_g) * yprime + offset_x) - qy_c = k_over_Dprime * (X_c * np.sin(phi_g) + np.cos(phi_g) * yprime + offset_y - Dprime) - qz_c = -1 * k_over_Dprime * (dprime * np.sin(theta_g) + Y_c * np.cos(theta_g) + offset_z) + qy_c = k_over_Dprime * ( + X_c * np.sin(phi_g) + np.cos(phi_g) * yprime + offset_y - Dprime + ) + qz_c = ( + -1 + * k_over_Dprime + * (dprime * np.sin(theta_g) + Y_c * np.cos(theta_g) + offset_z) + ) qr_c = np.sqrt(np.square(qx_c) + np.square(qy_c)) q_c = np.sqrt(np.square(qx_c) + np.square(qy_c) + np.square(qz_c)) @@ -750,7 +834,9 @@ def _generate_qxyz_maps(self): alpha_f = np.arctan2(Y * c * np.cos(theta_f), 1) # radians self.qx_map_data1 = self.get_k() * np.sin(theta_f) * np.cos(alpha_f) - self.qy_map_data1 = self.get_k() * (np.cos(theta_f) * np.cos(alpha_f) - 1) # TODO: Check sign + self.qy_map_data1 = self.get_k() * ( + np.cos(theta_f) * np.cos(alpha_f) - 1 + ) # TODO: Check sign self.qz_map_data1 = -1.0 * self.get_k() * np.sin(alpha_f) self.qr_map_data1 = np.sign(self.qx_map_data1) * np.sqrt( diff --git a/pyCHX/SAXS.py 
b/pyCHX/SAXS.py index fc2f54a..189ef25 100644 --- a/pyCHX/SAXS.py +++ b/pyCHX/SAXS.py @@ -5,9 +5,9 @@ """ # import numpy as np -from lmfit import Model, Parameter, Parameters, fit_report, minimize, report_fit -from scipy.optimize import curve_fit, least_squares, leastsq -from scipy.special import gamma, gammaln +from lmfit import Model, Parameters, minimize +from scipy.optimize import leastsq +from scipy.special import gamma from pyCHX.chx_generic_functions import find_index, plot1D, show_img @@ -62,7 +62,10 @@ def distribution_func(radius=1.0, sigma=0.1, num_points=20, spread=3, func="G"): spread = (1 - sigma) / sigma - 1 # print( num_points ) x, rs = np.linspace( - radius - radius * spread * sigma, radius + radius * spread * sigma, int(num_points), retstep=True + radius - radius * spread * sigma, + radius + radius * spread * sigma, + int(num_points), + retstep=True, ) # print(x) if func == "G": @@ -74,7 +77,14 @@ def distribution_func(radius=1.0, sigma=0.1, num_points=20, spread=3, func="G"): def poly_sphere_form_factor_intensity( - x, radius, sigma=0.1, delta_rho=1.00, background=0, num_points=20, spread=5, fit_func="G" + x, + radius, + sigma=0.1, + delta_rho=1.00, + background=0, + num_points=20, + spread=5, + fit_func="G", ): """ Input: @@ -82,7 +92,7 @@ def poly_sphere_form_factor_intensity( radius/R: in A sigma:sqrt root of variance in percent delta_rho: Scattering Length Density(SLD) difference between solvent and the scatter, A-2 - fit_func: G: Guassian;S: Flory–Schulz distribution + fit_func: G: Gaussian;S: Flory–Schulz distribution Output: The form factor intensity of the polydispersed scatter """ @@ -94,7 +104,9 @@ def poly_sphere_form_factor_intensity( if sigma == 0: v = mono_sphere_form_factor_intensity(q, R, delta_rho) else: - r, rs, wt = distribution_func(radius=R, sigma=sigma, num_points=num_points, spread=spread, func=fit_func) + r, rs, wt = distribution_func( + radius=R, sigma=sigma, num_points=num_points, spread=spread, func=fit_func + ) for i, 
Ri in enumerate(r): # print(Ri, wt[i],delta_rho, rs) v += mono_sphere_form_factor_intensity(q, Ri, delta_rho) * wt[i] * rs @@ -114,7 +126,9 @@ def poly_sphere_form_factor_intensity_q2( The form factor intensity of the polydispersed scatter """ - return poly_sphere_form_factor_intensity(x, radius, sigma, delta_rho, fit_func) * x**2 # * scale + baseline + return ( + poly_sphere_form_factor_intensity(x, radius, sigma, delta_rho, fit_func) * x**2 + ) # * scale + baseline def find_index_old(x, x0, tolerance=None): @@ -139,7 +153,9 @@ def find_index_old(x, x0, tolerance=None): return position -def form_factor_residuals(p, iq, q, num_points=20, spread=5, fit_func="G", form_model="poly_sphere"): +def form_factor_residuals( + p, iq, q, num_points=20, spread=5, fit_func="G", form_model="poly_sphere" +): """Residuals for fit iq by spheical form factor using leastsq. p: parameters for radius, sigma, delta_rho, background @@ -156,7 +172,12 @@ def form_factor_residuals(p, iq, q, num_points=20, spread=5, fit_func="G", form_ spread=spread, fit_func=fit_func, ) - radius, sigma, delta_rho, background = abs(radius), abs(sigma), abs(delta_rho), abs(background) + radius, sigma, delta_rho, background = ( + abs(radius), + abs(sigma), + abs(delta_rho), + abs(background), + ) err = np.log(iq / fiq) return np.sqrt(np.abs(err)) @@ -194,7 +215,9 @@ def form_factor_residuals_bg( return np.sqrt(np.abs(err)) -def form_factor_residuals_lmfit(p, iq, q, num_points=20, spread=5, fit_func="G", form_model="poly_sphere"): +def form_factor_residuals_lmfit( + p, iq, q, num_points=20, spread=5, fit_func="G", form_model="poly_sphere" +): """Residuals for fit iq by spheical form factor using leastsq. 
p: parameters for radius, sigma, delta_rho, background """ @@ -219,7 +242,9 @@ def form_factor_residuals_lmfit(p, iq, q, num_points=20, spread=5, fit_func="G", return err -def form_factor_residuals_bg_lmfit(p, iq, q, num_points=20, spread=5, fit_func="G", form_model="poly_sphere"): +def form_factor_residuals_bg_lmfit( + p, iq, q, num_points=20, spread=5, fit_func="G", form_model="poly_sphere" +): """Residuals for fit iq by spheical form factor using leastsq. p: parameters for radius, sigma, delta_rho, background """ @@ -262,7 +287,7 @@ def get_form_factor_fit_lmfit( num_points=20, spread=5, *argv, - **kwargs + **kwargs, ): """ YG Dev@CHX 2019/8/1 @@ -286,7 +311,7 @@ def get_form_factor_fit_lmfit( Returns ------- - fit resutls: + fit results: radius sigma an example: @@ -322,7 +347,10 @@ def get_form_factor_fit_lmfit( pars[var].vary = fit_variables[var] # print( pars ) result = minimize( - mod, pars, args=(iq_, q_), kws={"num_points": num_points, "spread": spread, "fit_func": fit_func} + mod, + pars, + args=(iq_, q_), + kws={"num_points": num_points, "spread": spread, "fit_func": fit_func}, ) fitp = {} fitpe = {} @@ -389,7 +417,7 @@ def get_form_factor_fit2( spread=5, bounds=None, *argv, - **kwargs + **kwargs, ): """ Fit form factor @@ -411,7 +439,7 @@ def get_form_factor_fit2( Returns ------- - fit resutls: + fit results: radius sigma an example: @@ -504,7 +532,9 @@ def get_form_factor_fit2( ) if (len(iq_) > len(p)) and pcov is not None: - s_sq = (fit_funcs(pfit, iq_, q_, num_points, spread, fit_func, function)).sum() / (len(iq_) - len(p)) + s_sq = ( + fit_funcs(pfit, iq_, q_, num_points, spread, fit_func, function) + ).sum() / (len(iq_) - len(p)) pcov = pcov * s_sq else: pcov = np.inf @@ -531,7 +561,7 @@ def get_form_factor_fit( fit_func="G", fit_power=0, *argv, - **kwargs + **kwargs, ): """ Fit form factor for GUI @@ -554,7 +584,7 @@ def get_form_factor_fit( Returns ------- - fit resutls: + fit results: radius sigma an example: @@ -566,7 +596,10 @@ def 
get_form_factor_fit( elif function == "mono_sphere": mod = Model(mono_sphere_form_factor_intensity) else: - print("The %s is not supported.The supported functions include poly_sphere and mono_sphere" % function) + print( + "The %s is not supported.The supported functions include poly_sphere and mono_sphere" + % function + ) if fit_range is not None: x1, x2 = fit_range @@ -614,7 +647,9 @@ def get_form_factor_fit( return result, q_ -def plot_form_factor_with_fit(q, iq, q_, result, fit_power=0, res_pargs=None, return_fig=False, *argv, **kwargs): +def plot_form_factor_with_fit( + q, iq, q_, result, fit_power=0, res_pargs=None, return_fig=False, *argv, **kwargs +): if res_pargs is not None: uid = res_pargs["uid"] path = res_pargs["path"] @@ -682,7 +717,7 @@ def fit_form_factor( fit_func="G", return_fig=False, *argv, - **kwargs + **kwargs, ): """ Fit form factor @@ -706,7 +741,7 @@ def fit_form_factor( Returns ------- - fit resutls: + fit results: radius sigma an example: @@ -714,9 +749,17 @@ def fit_form_factor( """ result, q_ = get_form_factor_fit( - q, iq, guess_values, fit_range=fit_range, fit_variables=fit_variables, function=function, fit_func=fit_func + q, + iq, + guess_values, + fit_range=fit_range, + fit_variables=fit_variables, + function=function, + fit_func=fit_func, + ) + plot_form_factor_with_fit( + q, iq, q_, result, fit_power=0, res_pargs=res_pargs, return_fig=return_fig ) - plot_form_factor_with_fit(q, iq, q_, result, fit_power=0, res_pargs=res_pargs, return_fig=return_fig) return result @@ -731,7 +774,7 @@ def fit_form_factor2( function="poly_sphere", fit_func="G", *argv, - **kwargs + **kwargs, ): """ Fit form factor @@ -755,7 +798,7 @@ def fit_form_factor2( Returns ------- - fit resutls: + fit results: radius sigma an example: @@ -780,7 +823,10 @@ def fit_form_factor2( elif function == "mono_sphere": mod = Model(mono_sphere_form_factor_intensity) else: - print("The %s is not supported.The supported functions include poly_sphere and mono_sphere" % 
function) + print( + "The %s is not supported.The supported functions include poly_sphere and mono_sphere" + % function + ) if fit_range is not None: x1, x2 = fit_range @@ -922,7 +968,12 @@ def show_saxs_qmap( # elif w > maxW: # img_[ cx-w//2:cx+w//2, cy+w//2:cy+w//2 ] = - ROI = [max(0, center[0] - w), min(center[0] + w, lx), max(0, center[1] - w), min(ly, center[1] + w)] + ROI = [ + max(0, center[0] - w), + min(center[0] + w, lx), + max(0, center[1] - w), + min(ly, center[1] + w), + ] # print( ROI ) ax = plt.subplots() if not show_pixel: @@ -988,7 +1039,9 @@ def show_saxs_qmap( ##Fit sphere by scipy.leastsq fit -def fit_sphere_form_factor_func(parameters, ydata, xdata, yerror=None, nonvariables=None): +def fit_sphere_form_factor_func( + parameters, ydata, xdata, yerror=None, nonvariables=None +): """##Develop by YG at July 28, 2017 @CHX This function is for fitting form factor of polyderse spherical particles by using scipy.leastsq fit @@ -1063,7 +1116,17 @@ def plot_fit_sphere_form_factor(q, pq, res, p0=None, xlim=None, ylim=None): fig, ax = plt.subplots() if p0 is not None: - plot1D(x=q, y=fit_init, c="b", m="", ls="-", lw=3, ax=ax, logy=True, legend="Init_Fitting") + plot1D( + x=q, + y=fit_init, + c="b", + m="", + ls="-", + lw=3, + ax=ax, + logy=True, + legend="Init_Fitting", + ) plot1D(x=q, y=fit, c="r", m="", ls="-", lw=3, ax=ax, logy=True, legend="Fitting") plot1D( x=q, diff --git a/pyCHX/Stitching.py b/pyCHX/Stitching.py index e78bdd3..335346d 100644 --- a/pyCHX/Stitching.py +++ b/pyCHX/Stitching.py @@ -1,6 +1,4 @@ -import os import re -import sys import matplotlib.pyplot as plt import numpy as np @@ -19,14 +17,16 @@ def get_base_all_filenames(inDir, base_filename_cut_length=-7): base_filename_cut_length: to which length the base name is unique Output: dict: keys, base filename - vales, all realted filename + vales, all related filename """ from os import listdir from os.path import isfile, join tifs = np.array([f for f in listdir(inDir) if 
isfile(join(inDir, f))]) tifsc = list(tifs.copy()) - utifs = np.sort(np.unique(np.array([f[:base_filename_cut_length] for f in tifs])))[::-1] + utifs = np.sort(np.unique(np.array([f[:base_filename_cut_length] for f in tifs])))[ + ::-1 + ] files = {} for uf in utifs: files[uf] = [] @@ -82,10 +82,10 @@ def Correct_Overlap_Images_Intensities( Return: data: array, stitched image with corrected intensity dataM: dict, each value is the image with correted intensity - scale: scale for each image, the first scale=1 by defination + scale: scale for each image, the first scale=1 by definition scale_smooth: smoothed scale - Exampe: + Example: data, dataM, scale,scale_smooth = Correct_Overlap_Images_Intensities( infiles, window_length=101, polyorder=5, overlap_width=58, badpixel_width =10 ) @@ -139,7 +139,9 @@ def Correct_Overlap_Images_Intensities( mode="mirror", cval=0.0, ) - data[:, a1:a2] = d[:, b1:b2] * np.repeat(scale_smooth[i], b2 - b1, axis=0).reshape([M, b2 - b1]) + data[:, a1:a2] = d[:, b1:b2] * np.repeat( + scale_smooth[i], b2 - b1, axis=0 + ).reshape([M, b2 - b1]) dataM[i] = np.zeros_like(dataM[i - 1]) dataM[i][:, 0 : w - ow] = dataM[i - 1][:, N - w : N - ow] dataM[i][:, w - ow :] = data[:, a1:a2] @@ -149,7 +151,15 @@ def Correct_Overlap_Images_Intensities( def check_overlap_scaling_factor(scale, scale_smooth, i=1, filename=None, save=False): """check_overlap_scaling_factor( scale,scale_smooth, i=1 )""" fig, ax = plt.subplots() - plot1D(scale[i], m="o", c="k", ax=ax, title="Scale_averaged_line_intensity_%s" % i, ls="", legend="Data") + plot1D( + scale[i], + m="o", + c="k", + ax=ax, + title="Scale_averaged_line_intensity_%s" % i, + ls="", + legend="Data", + ) plot1D( scale_smooth[i], ax=ax, @@ -163,10 +173,12 @@ def check_overlap_scaling_factor(scale, scale_smooth, i=1, filename=None, save=F fig.savefig(filename) -def stitch_WAXS_in_Qspace(dataM, phis, calibration, dx=0, dy=22, dz=0, dq=0.015, mask=None): +def stitch_WAXS_in_Qspace( + dataM, phis, calibration, dx=0, 
dy=22, dz=0, dq=0.015, mask=None +): """YG Octo 11, 2017 stitch waxs scattering images in qspace - dataM: the data (with corrected intensity), dict format (todolist, make array also avialable) - phis: for SMI, the rotation angle around z-aixs + dataM: the data (with corrected intensity), dict format (todolist, make array also available) + phis: for SMI, the rotation angle around z-axis For SMI dx= 0 #in pixel unit dy = 22 #in pixel unit @@ -216,16 +228,22 @@ def stitch_WAXS_in_Qspace(dataM, phis, calibration, dx=0, dy=22, dz=0, dq=0.015, dM = np.rot90(dataM[i].T) D = dM.ravel() phi = phis[i] - calibration.set_angles(det_phi_g=phi, det_theta_g=0.0, offset_x=dx, offset_y=dy, offset_z=dz) + calibration.set_angles( + det_phi_g=phi, det_theta_g=0.0, offset_x=dx, offset_y=dy, offset_z=dz + ) calibration.clear_maps() QZ = calibration.qz_map().ravel() # [pixel_list] QX = calibration.qx_map().ravel() # [pixel_list] bins = [num_qz, num_qx] rangeq = [[qz_min, qz_max], [qx_min, qx_max]] # Nov 7,2017 using new func to qmap - remesh_data, zbins, xbins = convert_Qmap(dM, QZ, QX, bins=bins, range=rangeq, mask=mask) + remesh_data, zbins, xbins = convert_Qmap( + dM, QZ, QX, bins=bins, range=rangeq, mask=mask + ) # Normalize by the binning - num_per_bin, zbins, xbins = convert_Qmap(np.ones_like(dM), QZ, QX, bins=bins, range=rangeq, mask=mask) + num_per_bin, zbins, xbins = convert_Qmap( + np.ones_like(dM), QZ, QX, bins=bins, range=rangeq, mask=mask + ) # remesh_data, zbins, xbins = np.histogram2d(QZ, QX, bins=bins, range=rangeq, normed=False, weights=D) # Normalize by the binning @@ -243,7 +261,6 @@ def plot_qmap_in_folder(inDir): """ import pickle as cpl - from pyCHX.chx_generic_functions import show_img from pyCHX.chx_libs import cmap_vge_hdr, plt fp = get_base_all_filenames(inDir, base_filename_cut_length=-10) @@ -279,7 +296,7 @@ def plot_qmap_in_folder(inDir): def get_qmap_range(calibration, phi_min, phi_max): """YG Sep 27@SMI Get q_range, [ qx_start, qx_end, qz_start, qz_end ] 
for SMI WAXS qmap - (only rotate around z-axis, so det_theta_g=0.,actually being the y-axis for beamline conventional defination) + (only rotate around z-axis, so det_theta_g=0.,actually being the y-axis for beamline conventional definition) based on calibration on Sep 22, offset_x= 0, offset_y= 22 Input: calibration: class, See SciAnalysis.XSAnalysis.DataGonio.CalibrationGonio @@ -300,7 +317,9 @@ def get_qmap_range(calibration, phi_min, phi_max): return np.array([qx_start, qx_end, qz_start, qz_end]) -def get_phi(filename, phi_offset=0, phi_start=4.5, phi_spacing=4.0, polarity=-1, ext="_WAXS.tif"): +def get_phi( + filename, phi_offset=0, phi_start=4.5, phi_spacing=4.0, polarity=-1, ext="_WAXS.tif" +): pattern_re = "^.+\/?([a-zA-Z0-9_]+_)(\d\d\d\d\d\d)(\%s)$" % ext # print( pattern_re ) # pattern_re='^.+\/?([a-zA-Z0-9_]+_)(\d\d\d)(\.tif)$' @@ -337,7 +356,7 @@ def get_qmap_qxyz_range( ): """YG Nov 8, 2017@CHX Get q_range, [ qx_start, qx_end, qz_start, qz_end ] for SMI WAXS qmap - (only rotate around z-axis, so det_theta_g=0.,actually being the y-axis for beamline conventional defination) + (only rotate around z-axis, so det_theta_g=0.,actually being the y-axis for beamline conventional definition) based on calibration on Sep 22, offset_x= 0, offset_y= 22 Input: calibration: class, See SciAnalysis.XSAnalysis.DataGonio.CalibrationGonio @@ -381,7 +400,11 @@ def get_qmap_qxyz_range( qy_end = np.min(calibration.qy_map_data) qz_end = np.min(calibration.qz_map_data) - return np.array([qx_start, qx_end]), np.array([qy_start, qy_end]), np.array([qz_start, qz_end]) + return ( + np.array([qx_start, qx_end]), + np.array([qy_start, qy_end]), + np.array([qz_start, qz_end]), + ) def stitch_WAXS_in_Qspace_CHX( @@ -403,8 +426,8 @@ def stitch_WAXS_in_Qspace_CHX( dq=0.0008, ): """YG Octo 11, 2017 stitch waxs scattering images in qspace - dataM: the data (with corrected intensity), dict format (todolist, make array also avialable) - phis: for SMI, the rotation angle around z-aixs + 
dataM: the data (with corrected intensity), dict format (todolist, make array also available) + phis: for SMI, the rotation angle around z-axis For SMI dx= 0 #in pixel unit dy = 22 #in pixel unit diff --git a/pyCHX/Two_Time_Correlation_Function.py b/pyCHX/Two_Time_Correlation_Function.py index a110211..015f11b 100644 --- a/pyCHX/Two_Time_Correlation_Function.py +++ b/pyCHX/Two_Time_Correlation_Function.py @@ -5,27 +5,23 @@ ###################################################################################### -import itertools -import sys import time -from datetime import datetime import matplotlib.pyplot as plt import numpy as np import skbeam.core.roi as roi from matplotlib import gridspec from matplotlib.colors import LogNorm -from modest_image import ModestImage, imshow +from modest_image import imshow from tqdm import tqdm # from pyCHX.chx_libs import colors_ as mcolors, markers_ as markers from pyCHX.chx_libs import RUN_GUI, Figure from pyCHX.chx_libs import colors from pyCHX.chx_libs import colors as colors_array -from pyCHX.chx_libs import lstyles from pyCHX.chx_libs import markers from pyCHX.chx_libs import markers as markers_array -from pyCHX.chx_libs import markers_copy, mcolors, multi_tau_lags +from pyCHX.chx_libs import multi_tau_lags def delays(num_lev=3, num_buf=4, time=1): @@ -53,7 +49,7 @@ def delays(num_lev=3, num_buf=4, time=1): class Get_Pixel_Array(object): """ Dec 16, 2015, Y.G.@CHX - a class to get intested pixels from a images sequence, + a class to get interested pixels from a images sequence, load ROI of all images into memory get_data: to get a 2-D array, shape as (len(images), len(pixellist)) @@ -71,12 +67,12 @@ def __init__(self, indexable, pixelist): # self.shape = indexable.shape try: self.length = len(indexable) - except: + except Exception: self.length = indexable.length def get_data(self): """ - To get intested pixels array + To get interested pixels array Return: 2-D array, shape as (len(images), len(pixellist)) """ @@ -95,7 +91,7 
@@ def __init__(self, indexable, mask): self.mask = mask try: self.shape = indexable.shape - except: + except Exception: # if self.shape = [len(indexable), indexable[0].shape[0], indexable[0].shape[1]] # self.shape = indexable.shape @@ -165,19 +161,29 @@ def run_time(t0): print("Total time: %.2f min" % (elapsed_time / 60.0)) -def get_each_frame_ROI_intensity(data_pixel, bad_pixel_threshold=1e10, plot_=False, *argv, **kwargs): +def get_each_frame_ROI_intensity( + data_series, + bad_pixel_threshold=1e10, + plot_=False, + save=False, + sampling=1, + *argv, + **kwargs, +): """ Dec 16, 2015, Y.G.@CHX Get the ROI intensity of each frame Also get bad_frame_list by check whether above bad_pixel_threshold - Usuage: - imgsum, bad_frame_list = get_each_frame_intensity( data_pixel, + Usage: + imgsum, bad_frame_list = get_each_frame_intensity( data_series, bad_pixel_threshold=1e10, plot_ = True) """ # print ( argv, kwargs ) - imgsum = np.array([np.sum(img) for img in tqdm(data_series[::sampling], leave=True)]) + imgsum = np.array( + [np.sum(img) for img in tqdm(data_series[::sampling], leave=True)] + ) if plot_: uid = "uid" if "uid" in kwargs.keys(): @@ -239,8 +245,6 @@ def auto_two_Array(data, rois, data_pixel=None): noframes = data_pixel.shape[0] g12b = np.zeros([noframes, noframes, noqs]) - Unitq = noqs / 10 - proi = 0 for qi in tqdm(range(1, noqs + 1)): pixelist_qi = np.where(qind == qi)[0] @@ -250,7 +254,9 @@ def auto_two_Array(data, rois, data_pixel=None): sum1 = (np.average(data_pixel_qi, axis=1)).reshape(1, noframes) sum2 = sum1.T - g12b[:, :, qi - 1] = np.dot(data_pixel_qi, data_pixel_qi.T) / sum1 / sum2 / nopr[qi - 1] + g12b[:, :, qi - 1] = ( + np.dot(data_pixel_qi, data_pixel_qi.T) / sum1 / sum2 / nopr[qi - 1] + ) # print ( proi, int( qi //( Unitq) ) ) # if int( qi //( Unitq) ) == proi: # sys.stdout.write("#") @@ -361,7 +367,7 @@ def get_aged_g2_from_g12(g12, age_edge, age_center): """ Dec 16, 2015, Y.G.@CHX Get one-time correlation function of different age from two 
correlation function - namely, calculate the different aged mean of each diag line of g12 to get one-time correlation fucntion + namely, calculate the different aged mean of each diag line of g12 to get one-time correlation function Parameters: g12: a 3-D array, a two correlation function, shape as ( imgs_length, imgs_length, noqs ) @@ -400,7 +406,9 @@ def get_aged_g2_from_g12(g12, age_edge, age_center): return g2_aged -def get_aged_g2_from_g12q(g12q, age_edge, age_center=None, timeperframe=1, time_sampling="log", num_bufs=8): +def get_aged_g2_from_g12q( + g12q, age_edge, age_center=None, timeperframe=1, time_sampling="log", num_bufs=8 +): """ @@ -410,7 +418,7 @@ def get_aged_g2_from_g12q(g12q, age_edge, age_center=None, timeperframe=1, time_ Dec 16, 2015, Y.G.@CHX Revised at April 19, 2017 Get one-time correlation function of different age from 1q-two correlation function - namely, calculate the different aged mean of each diag line of g12 to get one-time correlation fucntion + namely, calculate the different aged mean of each diag line of g12 to get one-time correlation function Parameters: g12q: a 2-D array, one-q two correlation function, shape as ( imgs_length, imgs_length ) @@ -472,11 +480,13 @@ def get_aged_g2_from_g12q(g12q, age_edge, age_center=None, timeperframe=1, time_ return lag_dict, g2_aged -def get_aged_g2_from_g12q2(g12q, slice_num=6, slice_width=5, slice_start=0, slice_end=1): +def get_aged_g2_from_g12q2( + g12q, slice_num=6, slice_width=5, slice_start=0, slice_end=1 +): """ Dec 16, 2015, Y.G.@CHX Get one-time correlation function of different age from two correlation function - namely, calculate the different aged mean of each diag line of g12 to get one-time correlation fucntion + namely, calculate the different aged mean of each diag line of g12 to get one-time correlation function Parameters: g12q: a 2-D array, one-q two correlation function, shape as ( imgs_length, imgs_length ) @@ -501,7 +511,9 @@ def get_aged_g2_from_g12q2(g12q, 
slice_num=6, slice_width=5, slice_start=0, slic arr = rotate_g12q_to_rectangle(g12q) m, n = arr.shape # m should be 2*n-1 - age_edge, age_center = get_qedge(qstart=slice_start, qend=slice_end, qwidth=slice_width, noqs=slice_num) + age_edge, age_center = get_qedge( + qstart=slice_start, qend=slice_end, qwidth=slice_width, noqs=slice_num + ) age_edge, age_center = np.int_(age_edge), np.int_(age_center) # print (age_edge, age_center) g2_aged = {} @@ -526,7 +538,7 @@ def show_g12q_aged_g2( uid="uid", path="", *argv, - **kwargs + **kwargs, ): """ Octo 20, 2017, add taus_aged option @@ -560,7 +572,9 @@ def show_g12q_aged_g2( age_center = np.array(list(sorted(g2_aged.keys()))) print("the cut age centers are: " + str(age_center)) - age_center = np.int_(np.array(list(sorted(g2_aged.keys()))) / timeperframe) * 2 # in pixel + age_center = ( + np.int_(np.array(list(sorted(g2_aged.keys()))) / timeperframe) * 2 + ) # in pixel M, N = g12q.shape # fig, ax = plt.subplots( figsize = (8,8) ) @@ -572,7 +586,14 @@ def show_g12q_aged_g2( # gs = gridspec.GridSpec(1, 2, width_ratios=[10, 8],height_ratios=[8,8] ) gs = gridspec.GridSpec(1, 2) ax = plt.subplot(gs[0]) - im = imshow(ax, g12q, origin="lower", cmap="viridis", norm=LogNorm(vmin, vmax), extent=[0, N, 0, N]) + im = imshow( + ax, + g12q, + origin="lower", + cmap="viridis", + norm=LogNorm(vmin, vmax), + extent=[0, N, 0, N], + ) # plt.gca().set_xticks(ticks) ticks = np.round(plt.gca().get_xticks() * timeperframe, 2) @@ -586,13 +607,13 @@ def show_g12q_aged_g2( ax1 = plt.subplot(gs[1]) linS1 = [[0] * len(age_center), np.int_(age_center - slice_width // 2)] linS2 = [[0] * len(age_center), np.int_(age_center + slice_width // 2)] - linE1 = [np.int_(age_center - slice_width // 2), [0] * len(age_center)] - linE2 = [np.int_(age_center + slice_width // 2), [0] * len(age_center)] + # linE1 = [np.int_(age_center - slice_width // 2), [0] * len(age_center)] + # linE2 = [np.int_(age_center + slice_width // 2), [0] * len(age_center)] linC = [[0] * 
len(age_center), np.int_(age_center)] for i in range(len(age_center)): ps = linS1[1][i] - pe = linE1[0][i] + # pe = linE1[0][i] if ps >= N: s0 = ps - N s1 = N @@ -605,15 +626,12 @@ def show_g12q_aged_g2( # else:e0=pe;e1=0 ps = linS2[1][i] - pe = linE2[0][i] if ps >= N: S0 = ps - N S1 = N else: S0 = 0 S1 = ps - # if pe>=N:e0=N;E1=pe - N - # else:E0=pe;E1=0 E0 = S1 E1 = S0 @@ -624,8 +642,6 @@ def show_g12q_aged_g2( else: C0 = 0 C1 = ps - # if pe>=N:e0=N;E1=pe - N - # else:E0=pe;E1=0 D0 = C1 D1 = C0 @@ -636,10 +652,31 @@ def show_g12q_aged_g2( # lined= slice_width/2. #in data width # linewidth= (lined * (figh*72./N)) * 0.8 linewidth = 1 - ax.plot([s0, e0], [s1, e1], linewidth=linewidth, ls="--", alpha=1, color=colors_array[i]) - ax.plot([S0, E0], [S1, E1], linewidth=linewidth, ls="--", alpha=1, color=colors_array[i]) + ax.plot( + [s0, e0], + [s1, e1], + linewidth=linewidth, + ls="--", + alpha=1, + color=colors_array[i], + ) + ax.plot( + [S0, E0], + [S1, E1], + linewidth=linewidth, + ls="--", + alpha=1, + color=colors_array[i], + ) # print( i, [s0,e0],[s1,e1], [S0,E0],[S1,E1], colors_array[i] ) - ax.plot([C0, D0], [C1, D1], linewidth=linewidthc, ls="-", alpha=0.0, color=colors_array[i]) + ax.plot( + [C0, D0], + [C1, D1], + linewidth=linewidthc, + ls="-", + alpha=0.0, + color=colors_array[i], + ) # ax.set_title( '%s_frames'%(N) ) ax.set_title("%s_two_time" % uid) @@ -678,12 +715,10 @@ def show_g12q_aged_g2( # print( fp ) fig.savefig(fp, dpi=fig.dpi) - # plt.show() - def plot_aged_g2(g2_aged, tau=None, timeperframe=1, ylim=None, xlim=None): """'A plot of g2 calculated from two-time""" - fig = plt.figure(figsize=(8, 10)) + _ = plt.figure(figsize=(8, 10)) age_center = list(sorted(g2_aged.keys())) gs = gridspec.GridSpec(len(age_center), 1) for n, i in enumerate(age_center): @@ -694,7 +729,13 @@ def plot_aged_g2(g2_aged, tau=None, timeperframe=1, ylim=None, xlim=None): gx = tau[i] marker = markers[n] c = colors[n] - ax.plot(gx, g2_aged[i], "-%s" % marker, c=c, label=r"$age= 
%.1f s$" % (i * timeperframe)) + ax.plot( + gx, + g2_aged[i], + "-%s" % marker, + c=c, + label=r"$age= %.1f s$" % (i * timeperframe), + ) ax.set_xscale("log") ax.legend(fontsize="large", loc="best") ax.set_xlabel(r"$\tau $ $(s)$", fontsize=18) @@ -709,7 +750,9 @@ def plot_aged_g2(g2_aged, tau=None, timeperframe=1, ylim=None, xlim=None): # get fout-time -def get_tau_from_g12q(g12q, slice_num=6, slice_width=1, slice_start=None, slice_end=None): +def get_tau_from_g12q( + g12q, slice_num=6, slice_width=1, slice_start=None, slice_end=None +): """ Dec 16, 2015, Y.G.@CHX Get tau lines from two correlation function @@ -739,7 +782,9 @@ def get_tau_from_g12q(g12q, slice_num=6, slice_width=1, slice_start=None, slice_ arr = rotate_g12q_to_rectangle(g12q) m, n = arr.shape # m should be 2*n-1 - age_edge, age_center = get_qedge(qstart=slice_start, qend=slice_end, qwidth=slice_width, noqs=slice_num) + age_edge, age_center = get_qedge( + qstart=slice_start, qend=slice_end, qwidth=slice_width, noqs=slice_num + ) age_edge, age_center = np.int_(age_edge), np.int_(age_center) # print (age_edge, age_center) tau = {} @@ -797,7 +842,14 @@ def show_g12q_taus(g12q, taus, slice_width=10, timeperframe=1, vmin=1, vmax=1.25 gs = gridspec.GridSpec(1, 2, width_ratios=[10, 8], height_ratios=[8, 8]) ax = plt.subplot(gs[0]) ax1 = plt.subplot(gs[1]) - im = imshow(ax, g12q, origin="lower", cmap="viridis", norm=LogNorm(vmin=vmin, vmax=vmax), extent=[0, N, 0, N]) + im = imshow( + ax, + g12q, + origin="lower", + cmap="viridis", + norm=LogNorm(vmin=vmin, vmax=vmax), + extent=[0, N, 0, N], + ) linS = [] linE = [] @@ -818,7 +870,9 @@ def show_g12q_taus(g12q, taus, slice_width=10, timeperframe=1, vmin=1, vmax=1.25 for i in sorted(taus.keys()): gx = np.arange(len(taus[i])) * timeperframe marker = next(markers) - ax1.plot(gx, taus[i], "-%s" % marker, label=r"$tau= %.1f s$" % (i * timeperframe)) + ax1.plot( + gx, taus[i], "-%s" % marker, label=r"$tau= %.1f s$" % (i * timeperframe) + ) ax1.set_ylim(vmin, vmax) 
ax1.set_xlabel(r"$t (s)$", fontsize=5) ax1.set_ylabel("g2") @@ -862,15 +916,20 @@ def histogram_taus(taus, hisbin=20, plot=True, timeperframe=1): if plot: fig, ax1 = plt.subplots(figsize=(8, 8)) - ax1.set_title("Tau_histgram") + ax1.set_title("Tau_histogram") for key in sorted(his.keys()): tx = 0.5 * (his[key][1][:-1] + his[key][1][1:]) marker = next(markers) - ax1.plot(tx, his[key][0], "-%s" % marker, label=r"$tau= %.1f s$" % (key * timeperframe)) + ax1.plot( + tx, + his[key][0], + "-%s" % marker, + label=r"$tau= %.1f s$" % (key * timeperframe), + ) # ax1.set_ylim( 1.05,1.35 ) ax1.set_xlim(1.05, 1.35) ax1.set_xlabel(r"$g_2$", fontsize=19) - ax1.set_ylabel(r"histgram of g2 @ tau", fontsize=15) + ax1.set_ylabel(r"histogram of g2 @ tau", fontsize=15) # ax1.set_xscale('log') ax1.legend(fontsize="large", loc="best") # plt.show() @@ -887,7 +946,7 @@ def get_one_time_from_two_time_old(g12, norms=None, nopr=None): """ Dec 16, 2015, Y.G.@CHX Get one-time correlation function from two correlation function - namely, calculate the mean of each diag line of g12 to get one-time correlation fucntion + namely, calculate the mean of each diag line of g12 to get one-time correlation function Parameters: g12: a 3-D array, two correlation function, shape as ( imgs_length, imgs_length, q) @@ -917,7 +976,9 @@ def get_one_time_from_two_time_old(g12, norms=None, nopr=None): yn = norms[:, q] yn1 = np.average(yn[tau:]) yn2 = np.average(yn[: m - tau]) - g2f12[tau, q] = np.nanmean(np.diag(y, k=int(tau))) / (yn1 * yn2 * nopr[q]) + g2f12[tau, q] = np.nanmean(np.diag(y, k=int(tau))) / ( + yn1 * yn2 * nopr[q] + ) return g2f12 @@ -926,7 +987,7 @@ def get_one_time_from_two_time(g12, norms=None, nopr=None): """ Dec 16, 2015, Y.G.@CHX Get one-time correlation function from two correlation function - namely, calculate the mean of each diag line of g12 to get one-time correlation fucntion + namely, calculate the mean of each diag line of g12 to get one-time correlation function Parameters: g12: a 3-D 
array, two correlation function, shape as ( imgs_length, imgs_length, q) @@ -952,9 +1013,10 @@ def get_one_time_from_two_time(g12, norms=None, nopr=None): g2f12 = np.zeros([m, noqs]) for q in range(noqs): yn = norms[:, q] - g2f12[i, q] = np.array( + g2f12[:, q] = np.array( [ - np.nanmean(g12[:, :, q].diagonal(i)) / (np.average(yn[i:]) * np.average(yn[: m - i]) * nopr[q]) + np.nanmean(g12[:, :, q].diagonal(i)) + / (np.average(yn[i:]) * np.average(yn[: m - i]) * nopr[q]) for i in range(m) ] ) @@ -965,7 +1027,7 @@ def get_four_time_from_two_time(g12, g2=None, rois=None): """ Dec 16, 2015, Y.G.@CHX Get four-time correlation function from two correlation function - namely, calculate the deviation of each diag line of g12 to get four-time correlation fucntion + namely, calculate the deviation of each diag line of g12 to get four-time correlation function TOBEDONE: deal with bad frames Parameters: @@ -973,7 +1035,7 @@ def get_four_time_from_two_time(g12, g2=None, rois=None): Options: g2: if not None, a 2-D array, shape as ( imgs_length, q), or (tau, q) - one-time correlation fucntion, for normalization of the four-time + one-time correlation function, for normalization of the four-time rois: if not None, a list, [x-slice-start, x-slice-end, y-slice-start, y-slice-end] Return: @@ -991,11 +1053,18 @@ def get_four_time_from_two_time(g12, g2=None, rois=None): else: norm = 1.0 if rois is None: - g4f12 = np.array([(np.nanstd(g12.diagonal(i), axis=1)) ** 2 / norm for i in range(m)]) + g4f12 = np.array( + [(np.nanstd(g12.diagonal(i), axis=1)) ** 2 / norm for i in range(m)] + ) else: x1, x2, y1, y2 = rois - g4f12 = np.array([(np.nanstd(g12[x1:x2, y1:y2, :].diagonal(i), axis=1)) ** 2 / norm for i in range(m)]) + g4f12 = np.array( + [ + (np.nanstd(g12[x1:x2, y1:y2, :].diagonal(i), axis=1)) ** 2 / norm + for i in range(m) + ] + ) return g4f12 @@ -1057,7 +1126,14 @@ def masked_g12(g12, badframes_list): def show_one_C12( - C12, fig_ax=None, return_fig=False, interpolation="none", 
cmap="viridis", show_colorbar=True, *argv, **kwargs + C12, + fig_ax=None, + return_fig=False, + interpolation="none", + cmap="viridis", + show_colorbar=True, + *argv, + **kwargs, ): """ plot one-q of two-time correlation function @@ -1160,6 +1236,7 @@ def show_C12( C12, fig_ax=None, q_ind=1, + num_qr=None, return_fig=False, interpolation="none", cmap="viridis", @@ -1167,7 +1244,7 @@ def show_C12( qlabel=None, show_colorbar=True, *argv, - **kwargs + **kwargs, ): """ plot one-q of two-time correlation function @@ -1202,7 +1279,12 @@ def show_C12( if isinstance(q_ind, int): C12_num = q_ind - 1 else: - qz_ind, qr_ind = q_ind - 1 + if num_qr is None: + raise ValueError( + "num_qr must be provided when q_ind is a [qz_ind, qr_ind] pair" + ) + qz_ind, qr_ind = q_ind + qz_ind, qr_ind = qz_ind - 1, qr_ind - 1 C12_num = qz_ind * num_qr + qr_ind if "timeperframe" in kwargs.keys(): @@ -1253,7 +1335,9 @@ def show_C12( fig, ax = fig_ax # extent=[0, data.shape[0]*timeperframe, 0, data.shape[0]*timeperframe ] - extent = np.array([N1, N2, N1, N2]) * timeperframe + timeoffset ### added timeoffset to extend + extent = ( + np.array([N1, N2, N1, N2]) * timeperframe + timeoffset + ) ### added timeoffset to extend if logs: im = imshow( @@ -1267,7 +1351,14 @@ def show_C12( ) else: im = imshow( - ax, data, origin="lower", cmap=cmap, vmin=vmin, vmax=vmax, interpolation=interpolation, extent=extent + ax, + data, + origin="lower", + cmap=cmap, + vmin=vmin, + vmax=vmax, + interpolation=interpolation, + extent=extent, ) if qlabel is not None: if isinstance(q_ind, int): @@ -1278,7 +1369,13 @@ def show_C12( if isinstance(q_ind, int): tit = "%s-[%s-%s] frames--" % (uid, N1, N2) + qstr else: - tit = "%s-[%s-%s] frames--Qzth= %s--Qrth= %s" % (uid, N1, N2, qz_ind, qr_ind) + tit = "%s-[%s-%s] frames--Qzth= %s--Qrth= %s" % ( + uid, + N1, + N2, + qz_ind, + qr_ind, + ) ax.set_title(tit) else: tit = "" diff --git a/pyCHX/XPCS_GiSAXS.py b/pyCHX/XPCS_GiSAXS.py index 8c57ff8..6f8269c 100644 --- 
a/pyCHX/XPCS_GiSAXS.py +++ b/pyCHX/XPCS_GiSAXS.py @@ -4,18 +4,19 @@ This module is for the GiSAXS XPCS analysis """ -from skbeam.core.accumulators.binned_statistic import BinnedStatistic1D, BinnedStatistic2D +from skbeam.core.accumulators.binned_statistic import ( + BinnedStatistic1D, + BinnedStatistic2D, +) from pyCHX.chx_compress import ( Multifile, compress_eigerdata, get_avg_imgc, - init_compress_eigerdata, - read_compressed_eigerdata, ) from pyCHX.chx_correlationc import cal_g2c from pyCHX.chx_generic_functions import * -from pyCHX.chx_libs import colors, colors_, markers, markers_ +from pyCHX.chx_libs import colors, markers def get_gisaxs_roi2(qr_edge, qz_edge, qr_map, qz_map, mask=None, qval_dict=None): @@ -23,9 +24,9 @@ def get_gisaxs_roi2(qr_edge, qz_edge, qr_map, qz_map, mask=None, qval_dict=None) Get xpcs roi of gisaxs by giving Qr centers/edges, Qz centers/edges Parameters: qr_edge: list, e.g., [ [0.01,0.02], [0.03,0.04] ]. - each elment has two values for the start and end of one qr edge + each element has two values for the start and end of one qr edge qz_edge: list, e.g., [ [0.01,0.02], [0.03,0.04] ] - each elment has two values for the start and end of one qz edge + each element has two values for the start and end of one qz edge qr_map: two-d array, the same shape as gisaxs frame, a qr map qz_map: two-d array, the same shape as gisaxs frame, a qz map mask: array, the scattering mask @@ -44,14 +45,18 @@ def get_gisaxs_roi2(qr_edge, qz_edge, qr_map, qz_map, mask=None, qval_dict=None) qz_center = 0.5 * (qz_edge[:, 0] + qz_edge[:, 1]) label_array_qz = get_qmap_label(qz_map, qz_edge) label_array_qr = get_qmap_label(qr_map, qr_edge) - label_array_qzr, qzc, qrc = get_qzrmap(label_array_qz, label_array_qr, qz_center, qr_center) + label_array_qzr, qzc, qrc = get_qzrmap( + label_array_qz, label_array_qr, qz_center, qr_center + ) labels_qzr, indices_qzr = roi.extract_label_indices(label_array_qzr) labels_qz, indices_qz = 
roi.extract_label_indices(label_array_qz) labels_qr, indices_qr = roi.extract_label_indices(label_array_qr) if mask is None: mask = 1 roi_mask = label_array_qzr * mask - qval_dict = get_qval_dict(np.round(qr_center, 5), np.round(qz_center, 5), qval_dict=qval_dict) + qval_dict = get_qval_dict( + np.round(qr_center, 5), np.round(qz_center, 5), qval_dict=qval_dict + ) return roi_mask, qval_dict @@ -76,14 +81,18 @@ def get_gisaxs_roi(Qr, Qz, qr_map, qz_map, mask=None, qval_dict=None): qz_edge, qz_center = get_qedge(*Qz) label_array_qz = get_qmap_label(qz_map, qz_edge) label_array_qr = get_qmap_label(qr_map, qr_edge) - label_array_qzr, qzc, qrc = get_qzrmap(label_array_qz, label_array_qr, qz_center, qr_center) + label_array_qzr, qzc, qrc = get_qzrmap( + label_array_qz, label_array_qr, qz_center, qr_center + ) labels_qzr, indices_qzr = roi.extract_label_indices(label_array_qzr) labels_qz, indices_qz = roi.extract_label_indices(label_array_qz) labels_qr, indices_qr = roi.extract_label_indices(label_array_qr) if mask is None: mask = 1 roi_mask = label_array_qzr * mask - qval_dict = get_qval_dict(np.round(qr_center, 5), np.round(qz_center, 5), qval_dict=qval_dict) + qval_dict = get_qval_dict( + np.round(qr_center, 5), np.round(qz_center, 5), qval_dict=qval_dict + ) return roi_mask, qval_dict @@ -116,7 +125,7 @@ def get_qr(data, Qr, Qz, qr, qz, mask=None): Qz= [qz_start, qz_end, qz_width , qz_num ] new_mask[ :, 1020:1045] =0 ticks = show_qzr_map( qr,qz, inc_x0, data = avg_imgmr, Nzline=10, Nrline=10 ) - qx, qy, qr, qz = convert_gisaxs_pixel_to_q( inc_x0, inc_y0,refl_x0,refl_y0, lamda=lamda, Lsd=Lsd ) + qx, qy, qr, qz = convert_gisaxs_pixel_to_q( inc_x0, inc_y0,refl_x0,refl_y0, lamda=lamda, Lsd=Lsd ) qr_1d = get_qr( avg_imgr, Qr, Qz, qr, qz, new_mask) """ @@ -133,7 +142,9 @@ def get_qr(data, Qr, Qz, qr, qz, mask=None): # print (i,qzc_) label_array_qz = get_qmap_label(qz, qz_edge[i * 2 : 2 * i + 2]) # print (qzc_, qz_edge[i*2:2*i+2]) - label_array_qzr, qzc, qrc = 
get_qzrmap(label_array_qz, label_array_qr, qz_center, qr_center) + label_array_qzr, qzc, qrc = get_qzrmap( + label_array_qz, label_array_qr, qz_center, qr_center + ) # print (np.unique(label_array_qzr )) if mask is not None: label_array_qzr *= mask @@ -142,7 +153,9 @@ def get_qr(data, Qr, Qz, qr, qz, mask=None): data_ = data * label_array_qzr qr_ave = np.sum(qr_, axis=0) / roi_pixel_num data_ave = np.sum(data_, axis=0) / roi_pixel_num - qr_ave, data_ave = zip(*sorted(zip(*[qr_ave[~np.isnan(qr_ave)], data_ave[~np.isnan(data_ave)]]))) + qr_ave, data_ave = zip( + *sorted(zip(*[qr_ave[~np.isnan(qr_ave)], data_ave[~np.isnan(data_ave)]])) + ) if i == 0: N_interp = len(qr_ave) @@ -150,9 +163,13 @@ def get_qr(data, Qr, Qz, qr, qz, mask=None): data_ave = np.interp(qr_ave_intp, qr_ave, data_ave) # columns.append( ['qr%s'%i, str(round(qzc_,4))] ) if i == 0: - df = np.hstack([(qr_ave_intp).reshape(N_interp, 1), data_ave.reshape(N_interp, 1)]) + df = np.hstack( + [(qr_ave_intp).reshape(N_interp, 1), data_ave.reshape(N_interp, 1)] + ) else: - df = np.hstack([df, (qr_ave_intp).reshape(N_interp, 1), data_ave.reshape(N_interp, 1)]) + df = np.hstack( + [df, (qr_ave_intp).reshape(N_interp, 1), data_ave.reshape(N_interp, 1)] + ) # df = DataFrame( df ) # df.columns = np.concatenate( columns ) @@ -182,7 +199,7 @@ def cal_1d_qr( Dec 16, 2016, Y.G.@CHX calculate one-d of I(q) as a function of qr for different qz data: a dataframe - Qr: info for qr, = qr_start , qr_end, qr_width, qr_num, the purpose of Qr is only for the defination of qr range (qr number does not matter) + Qr: info for qr, = qr_start , qr_end, qr_width, qr_num, the purpose of Qr is only for the definition of qr range (qr number does not matter) Qz: info for qz, = qz_start, qz_end, qz_width , qz_num qr: qr-map qz: qz-map @@ -210,7 +227,7 @@ def cal_1d_qr( Qr = [qr_start , qr_end, qr_width, qr_num] Qz= [qz_start, qz_end, qz_width , qz_num ] new_mask[ :, 1020:1045] =0 - qx, qy, qr, qz = convert_gisaxs_pixel_to_q( inc_x0, 
inc_y0,refl_x0,refl_y0, lamda=lamda, Lsd=Lsd ) + qx, qy, qr, qz = convert_gisaxs_pixel_to_q( inc_x0, inc_y0,refl_x0,refl_y0, lamda=lamda, Lsd=Lsd ) qr_1d = get_1d_qr( avg_imgr, Qr, Qz, qr, qz, inc_x0, new_mask) @@ -232,7 +249,9 @@ def cal_1d_qr( # print (i,qzc_) label_array_qz = get_qmap_label(qz, qz_edge[i * 2 : 2 * i + 2]) # print (qzc_, qz_edge[i*2:2*i+2]) - label_array_qzr, qzc, qrc = get_qzrmap(label_array_qz, label_array_qr, qz_center, qr_center) + label_array_qzr, qzc, qrc = get_qzrmap( + label_array_qz, label_array_qr, qz_center, qr_center + ) # print (np.unique(label_array_qzr )) if mask is not None: label_array_qzr *= mask @@ -247,7 +266,9 @@ def cal_1d_qr( qr_ave = (np.sum(qr_, axis=0))[w] / roi_pixel_num[w] data_ave = (np.sum(data_, axis=0))[w] / roi_pixel_num[w] - qr_ave, data_ave = zip(*sorted(zip(*[qr_ave[~np.isnan(qr_ave)], data_ave[~np.isnan(data_ave)]]))) + qr_ave, data_ave = zip( + *sorted(zip(*[qr_ave[~np.isnan(qr_ave)], data_ave[~np.isnan(data_ave)]])) + ) if i == 0: N_interp = len(qr_ave) columns.append(["qr"]) @@ -257,7 +278,9 @@ def cal_1d_qr( # qr_1d[i]= [qr_ave_intp, data_ave] columns.append(["qz%s=%s" % (i, str(round(qzc_, 4)))]) if i == 0: - df = np.hstack([(qr_ave_intp).reshape(N_interp, 1), data_ave.reshape(N_interp, 1)]) + df = np.hstack( + [(qr_ave_intp).reshape(N_interp, 1), data_ave.reshape(N_interp, 1)] + ) else: df = np.hstack([df, data_ave.reshape(N_interp, 1)]) df = DataFrame(df) @@ -271,11 +294,26 @@ def cal_1d_qr( filename = os.path.join(path, "%s_qr_1d.csv" % (uid)) df.to_csv(filename) if print_save_message: - print("The qr_1d is saved in %s with filename as %s_qr_1d.csv" % (path, uid)) + print( + "The qr_1d is saved in %s with filename as %s_qr_1d.csv" % (path, uid) + ) return df -def get_t_qrc(FD, frame_edge, Qr, Qz, qr, qz, mask=None, path=None, uid=None, save=True, *argv, **kwargs): +def get_t_qrc( + FD, + frame_edge, + Qr, + Qz, + qr, + qz, + mask=None, + path=None, + uid=None, + save=True, + *argv, + **kwargs, +): 
"""Get t-dependent qr Parameters @@ -327,11 +365,15 @@ def get_t_qrc(FD, frame_edge, Qr, Qz, qr, qz, mask=None, path=None, uid=None, sa uid = setup_pargs["uid"] filename = os.path.join(path, "%s_qrt_pds.csv" % (uid)) qrt_pds.to_csv(filename) - print("The qr~time is saved in %s with filename as %s_qrt_pds.csv" % (path, uid)) + print( + "The qr~time is saved in %s with filename as %s_qrt_pds.csv" % (path, uid) + ) return qrt_pds -def plot_qrt_pds(qrt_pds, frame_edge, qz_index=0, uid="uid", path="", fontsize=8, *argv, **kwargs): +def plot_qrt_pds( + qrt_pds, frame_edge, qz_index=0, uid="uid", path="", fontsize=8, *argv, **kwargs +): """Y.G. Jan 04, 2017 plot t-dependent qr @@ -388,7 +430,7 @@ def plot_t_qrc(qr_1d, frame_edge, save=False, pargs=None, fontsize=8, *argv, **k qr_1d: array, with shape as time length, frame_edge frame_edge: list, the ROI frame regions, e.g., [ [0,100], [200,400] ] save: save the plot - if save, all the following paramters are given in argv + if save, all the following parameters are given in argv { 'path': 'uid': } @@ -458,7 +500,9 @@ def make_gisaxs_grid(qr_w=10, qz_w=12, dim_r=100, dim_z=120): ########################################### -def convert_Qmap(img, qx_map, qy_map=None, bins=None, rangeq=None, mask=None, statistic="sum"): +def convert_Qmap( + img, qx_map, qy_map=None, bins=None, rangeq=None, mask=None, statistic="sum" +): """Y.G. 
Nov 3@CHX Convert a scattering image to a qmap by giving qx_map and qy_map Return converted qmap, x-coordinates and y-coordinates @@ -475,9 +519,18 @@ def convert_Qmap(img, qx_map, qy_map=None, bins=None, rangeq=None, mask=None, st else: m = None b2d = BinnedStatistic2D( - qx_map.ravel(), qy_map.ravel(), statistic=statistic, bins=bins, mask=m, range=rangeq + qx_map.ravel(), + qy_map.ravel(), + statistic=statistic, + bins=bins, + mask=m, + range=rangeq, + ) + remesh_data, xbins, ybins = ( + b2d(img.ravel()), + b2d.bin_centers[0], + b2d.bin_centers[1], ) - remesh_data, xbins, ybins = b2d(img.ravel()), b2d.bin_centers[0], b2d.bin_centers[1] else: if rangeq is None: qx_min, qx_max = qx_map.min(), qx_map.max() @@ -515,7 +568,14 @@ def get_refl_xy(inc_ang, inc_phi, inc_x0, inc_y0, pixelsize=[0.075, 0.075], Lsd= def get_alphaf_thetaf( - inc_x0, inc_y0, inc_ang, inc_phi=0, pixelsize=[0.075, 0.075], Lsd=5000, dimx=2070.0, dimy=2167.0 + inc_x0, + inc_y0, + inc_ang, + inc_phi=0, + pixelsize=[0.075, 0.075], + Lsd=5000, + dimx=2070.0, + dimy=2167.0, ): """Nov 19, 2018@SMI to get alphaf and thetaf for gi scattering Input: @@ -543,7 +603,7 @@ def convert_gisaxs_pixel_to_q2( alphaf, thetaf, phi=0, - lamda=1.0, + lamda=1.0, thetai=0.0, ): """ @@ -558,7 +618,7 @@ def convert_gisaxs_pixel_to_q2( get: q_parallel (qp), q_direction_z (qz) """ - pref = 2 * np.pi / lamda + pref = 2 * np.pi / lamda alphai = np.radians(inc_ang) thetai = np.radians(thetai) phi = np.radians(phi) @@ -585,7 +645,9 @@ def get_incident_angles(inc_x0, inc_y0, refl_x0, refl_y0, pixelsize=[75, 75], Ls Lsd = Lsd / 1000.0 px, py = pixelsize - phi = np.arctan2((-refl_x0 + inc_x0) * px * 10 ** (-6), (refl_y0 - inc_y0) * py * 10 ** (-6)) + phi = np.arctan2( + (-refl_x0 + inc_x0) * px * 10 ** (-6), (refl_y0 - inc_y0) * py * 10 ** (-6) + ) alphai = np.arctan2((refl_y0 - inc_y0) * py * 10 ** (-6), Lsd) / 2.0 # thetai = np.arctan2( (rcenx - bcenx)*px *10**(-6), Lsd ) /2. #?? 
@@ -593,7 +655,15 @@ def get_incident_angles(inc_x0, inc_y0, refl_x0, refl_y0, pixelsize=[75, 75], Ls def get_reflected_angles( - inc_x0, inc_y0, refl_x0, refl_y0, thetai=0.0, pixelsize=[75, 75], Lsd=5.0, dimx=2070.0, dimy=2167.0 + inc_x0, + inc_y0, + refl_x0, + refl_y0, + thetai=0.0, + pixelsize=[75, 75], + Lsd=5.0, + dimx=2070.0, + dimy=2167.0, ): """Dec 16, 2015, Y.G.@CHX giving: incident beam center: bcenx,bceny @@ -619,7 +689,16 @@ def get_reflected_angles( def convert_gisaxs_pixel_to_q( - inc_x0, inc_y0, refl_x0, refl_y0, pixelsize=[75, 75], Lsd=5.0, dimx=2070.0, dimy=2167.0, thetai=0.0, lamda=1.0 + inc_x0, + inc_y0, + refl_x0, + refl_y0, + pixelsize=[75, 75], + Lsd=5.0, + dimx=2070.0, + dimy=2167.0, + thetai=0.0, + lamda=1.0, ): """ Dec 16, 2015, Y.G.@CHX @@ -636,7 +715,7 @@ def convert_gisaxs_pixel_to_q( alphaf, thetaf, alphai, phi = get_reflected_angles( inc_x0, inc_y0, refl_x0, refl_y0, thetai, pixelsize, Lsd, dimx, dimy ) - pref = 2 * np.pi / lamda + pref = 2 * np.pi / lamda qx = np.cos(alphaf) * np.cos(2 * thetaf) - np.cos(alphai) * np.cos(2 * thetai) qy_ = np.cos(alphaf) * np.sin(2 * thetaf) - np.cos(alphai) * np.sin(2 * thetai) qz_ = np.sin(alphaf) + np.sin(alphai) @@ -655,7 +734,6 @@ def get_qedge(qstart, qend, qwidth, noqs, verbose=True): return a qedge by giving the noqs, qstart,qend,qwidth.
a qcenter, which is center of each qedge KEYWORD: None""" - import numpy as np if noqs != 1: spacing = (qend - qstart - noqs * qwidth) / (noqs - 1) # spacing between rings @@ -735,13 +813,23 @@ def get_qzrmap(label_array_qz, label_array_qr, qz_center, qr_center): for i, label in enumerate(uqzr): # print (i, label) - label_array_qzr_.ravel()[np.where(label_array_qzr.ravel() == label)[0]] = newl[i] + label_array_qzr_.ravel()[np.where(label_array_qzr.ravel() == label)[0]] = newl[ + i + ] return np.int_(label_array_qzr_), np.array(qzc), np.concatenate(np.array(qrc)) def show_label_array_on_image( - ax, image, label_array, cmap=None, norm=None, log_img=True, alpha=0.3, imshow_cmap="gray", **kwargs + ax, + image, + label_array, + cmap=None, + norm=None, + log_img=True, + alpha=0.3, + imshow_cmap="gray", + **kwargs, ): # norm=LogNorm(), """ This will plot the required ROI's(labeled array) on the image @@ -771,9 +859,13 @@ def show_label_array_on_image( """ ax.set_aspect("equal") if log_img: - im = ax.imshow(image, cmap=imshow_cmap, interpolation="none", norm=LogNorm(norm), **kwargs) # norm=norm, + im = ax.imshow( + image, cmap=imshow_cmap, interpolation="none", norm=LogNorm(norm), **kwargs + ) # norm=norm, else: - im = ax.imshow(image, cmap=imshow_cmap, interpolation="none", norm=norm, **kwargs) # norm=norm, + im = ax.imshow( + image, cmap=imshow_cmap, interpolation="none", norm=norm, **kwargs + ) # norm=norm, im_label = mpl_plot.show_label_array( ax, label_array, cmap=cmap, norm=norm, alpha=alpha, **kwargs @@ -784,7 +876,7 @@ def show_label_array_on_image( def show_qz(qz): """Dec 16, 2015, Y.G.@CHX - plot qz mape + plot qz map """ @@ -797,7 +889,7 @@ def show_qz(qz): def show_qr(qr): """Dec 16, 2015, Y.G.@CHX - plot qr mape + plot qr map """ fig, ax = plt.subplots() @@ -811,12 +903,14 @@ def show_alphaf( alphaf, ): """Dec 16, 2015, Y.G.@CHX - plot alphaf mape + plot alphaf map """ fig, ax = plt.subplots() - im = ax.imshow(alphaf * 180 / np.pi, origin="lower", 
cmap="viridis", vmin=-1, vmax=1.5) + im = ax.imshow( + alphaf * 180 / np.pi, origin="lower", cmap="viridis", vmin=-1, vmax=1.5 + ) # im=ax.imshow(alphaf, origin='lower' ,cmap='viridis',norm= LogNorm(vmin=0.0001,vmax=2.00)) fig.colorbar(im) ax.set_title("alphaf") @@ -877,7 +971,7 @@ def get_1d_qr( Qz= [qz_start, qz_end, qz_width , qz_num ] new_mask[ :, 1020:1045] =0 ticks = show_qzr_map( qr,qz, inc_x0, data = avg_imgmr, Nzline=10, Nrline=10 ) - qx, qy, qr, qz = convert_gisaxs_pixel_to_q( inc_x0, inc_y0,refl_x0,refl_y0, lamda=lamda, Lsd=Lsd ) + qx, qy, qr, qz = convert_gisaxs_pixel_to_q( inc_x0, inc_y0,refl_x0,refl_y0, lamda=lamda, Lsd=Lsd ) qr_1d = get_1d_qr( avg_imgr, Qr, Qz, qr, qz, inc_x0, new_mask, True, ticks, .8) @@ -898,7 +992,9 @@ def get_1d_qr( if show_roi: label_array_qz0 = get_qmap_label(qz, qz_edge) - label_array_qzr0, qzc0, qrc0 = get_qzrmap(label_array_qz0, label_array_qr, qz_center, qr_center) + label_array_qzr0, qzc0, qrc0 = get_qzrmap( + label_array_qz0, label_array_qr, qz_center, qr_center + ) if mask is not None: label_array_qzr0 *= mask @@ -912,7 +1008,9 @@ def get_1d_qr( # print (i,qzc_) label_array_qz = get_qmap_label(qz, qz_edge[i * 2 : 2 * i + 2]) # print (qzc_, qz_edge[i*2:2*i+2]) - label_array_qzr, qzc, qrc = get_qzrmap(label_array_qz, label_array_qr, qz_center, qr_center) + label_array_qzr, qzc, qrc = get_qzrmap( + label_array_qz, label_array_qr, qz_center, qr_center + ) # print (np.unique(label_array_qzr )) if mask is not None: label_array_qzr *= mask @@ -922,7 +1020,9 @@ def get_1d_qr( qr_ave = np.sum(qr_, axis=0) / roi_pixel_num data_ave = np.sum(data_, axis=0) / roi_pixel_num - qr_ave, data_ave = zip(*sorted(zip(*[qr_ave[~np.isnan(qr_ave)], data_ave[~np.isnan(data_ave)]]))) + qr_ave, data_ave = zip( + *sorted(zip(*[qr_ave[~np.isnan(qr_ave)], data_ave[~np.isnan(data_ave)]])) + ) if i == 0: N_interp = len(qr_ave) @@ -937,9 +1037,13 @@ def get_1d_qr( else: ax.plot(qr_ave_intp, data_ave, "--o", label="qz= %f" % qzc_) if i == 0: - df =
np.hstack([(qr_ave_intp).reshape(N_interp, 1), data_ave.reshape(N_interp, 1)]) + df = np.hstack( + [(qr_ave_intp).reshape(N_interp, 1), data_ave.reshape(N_interp, 1)] + ) else: - df = np.hstack([df, (qr_ave_intp).reshape(N_interp, 1), data_ave.reshape(N_interp, 1)]) + df = np.hstack( + [df, (qr_ave_intp).reshape(N_interp, 1), data_ave.reshape(N_interp, 1)] + ) # ax.set_xlabel( r'$q_r$', fontsize=15) ax.set_xlabel(r"$q_r$" r"($\AA^{-1}$)", fontsize=18) @@ -960,7 +1064,9 @@ def get_1d_qr( # filename = os.path.join(path, 'qr_1d-%s-%s.csv' % (uid,CurTime)) filename = os.path.join(path, "uid=%s--qr_1d.csv" % (uid)) df.to_csv(filename) - print("The qr_1d is saved in %s with filename as uid=%s--qr_1d.csv" % (path, uid)) + print( + "The qr_1d is saved in %s with filename as uid=%s--qr_1d.csv" % (path, uid) + ) # fp = path + 'Uid= %s--Circular Average'%uid + CurTime + '.png' fp = path + "uid=%s--qr_1d-" % uid + ".png" @@ -1082,13 +1188,17 @@ def get_qr_tick_label(qr, label_array_qr, inc_x0, interp=True): rticks_label = np.array(rticks_label) try: w = np.where(rticks <= inc_x0)[0] - rticks1 = np.int_(np.interp(np.round(rticks_label[w], 3), rticks_label[w], rticks[w])) + rticks1 = np.int_( + np.interp(np.round(rticks_label[w], 3), rticks_label[w], rticks[w]) + ) rticks_label1 = np.round(rticks_label[w], 3) except: rticks_label1 = [] try: w = np.where(rticks > inc_x0)[0] - rticks2 = np.int_(np.interp(np.round(rticks_label[w], 3), rticks_label[w], rticks[w])) + rticks2 = np.int_( + np.interp(np.round(rticks_label[w], 3), rticks_label[w], rticks[w]) + ) rticks = np.append(rticks1, rticks2) rticks_label2 = np.round(rticks_label[w], 3) except: @@ -1134,7 +1244,17 @@ def get_qz_tick_label(qz, label_array_qz, interp=True): return zticks, zticks_label -def get_qzr_map(qr, qz, inc_x0, Nzline=10, Nrline=10, interp=True, return_qrz_label=True, *argv, **kwargs): +def get_qzr_map( + qr, + qz, + inc_x0, + Nzline=10, + Nrline=10, + interp=True, + return_qrz_label=True, + *argv, + **kwargs, 
+): """ Dec 31, 2016, Y.G.@CHX Calculate a qzr map of a gisaxs image (data) without plot @@ -1154,16 +1274,20 @@ def get_qzr_map(qr, qz, inc_x0, Nzline=10, Nrline=10, interp=True, return_qrz_la rticks: list, r-tick positions in unit of pixel rticks_label: list, r-tick positions in unit of real space else: return the additional two below - label_array_qr: qr label array with the same shpae as gisaxs image - label_array_qz: qz label array with the same shpae as gisaxs image + label_array_qr: qr label array with the same shape as gisaxs image + label_array_qz: qz label array with the same shape as gisaxs image Examples: ticks = get_qzr_map( qr, qz, inc_x0 ) """ qr_start, qr_end, qr_num = qr.min(), qr.max(), Nrline qz_start, qz_end, qz_num = qz.min(), qz.max(), Nzline - qr_edge, qr_center = get_qedge(qr_start, qr_end, (qr_end - qr_start) / (qr_num + 100), qr_num) - qz_edge, qz_center = get_qedge(qz_start, qz_end, (qz_end - qz_start) / (qz_num + 100), qz_num) + qr_edge, qr_center = get_qedge( + qr_start, qr_end, (qr_end - qr_start) / (qr_num + 100), qr_num + ) + qz_edge, qz_center = get_qedge( + qz_start, qz_end, (qz_end - qz_start) / (qz_num + 100), qz_num + ) label_array_qz = get_qmap_label(qz, qz_edge) label_array_qr = get_qmap_label(qr, qr_edge) @@ -1175,18 +1299,41 @@ def get_qzr_map(qr, qz, inc_x0, Nzline=10, Nrline=10, interp=True, return_qrz_la zticks, zticks_label = get_qz_tick_label(qz, label_array_qz) # rticks,rticks_label = get_qr_tick_label(label_array_qr,inc_x0) try: - rticks, rticks_label = zip(*np.sort(zip(*get_qr_tick_label(qr, label_array_qr, inc_x0, interp=interp)))) + rticks, rticks_label = zip( + *np.sort(zip(*get_qr_tick_label(qr, label_array_qr, inc_x0, interp=interp))) + ) except: - rticks, rticks_label = zip(*sorted(zip(*get_qr_tick_label(qr, label_array_qr, inc_x0, interp=interp)))) + rticks, rticks_label = zip( + *sorted(zip(*get_qr_tick_label(qr, label_array_qr, inc_x0, interp=interp))) + ) # stride = int(len(zticks)/10) ticks = [zticks, 
zticks_label, rticks, rticks_label] if return_qrz_label: - return zticks, zticks_label, rticks, rticks_label, label_array_qr, label_array_qz + return ( + zticks, + zticks_label, + rticks, + rticks_label, + label_array_qr, + label_array_qz, + ) else: return zticks, zticks_label, rticks, rticks_label -def plot_qzr_map(qr, qz, inc_x0, ticks=None, data=None, uid="uid", path="", vmin=0.001, vmax=1e1, *argv, **kwargs): +def plot_qzr_map( + qr, + qz, + inc_x0, + ticks=None, + data=None, + uid="uid", + path="", + vmin=0.001, + vmax=1e1, + *argv, + **kwargs, +): """ Dec 31, 2016, Y.G.@CHX plot a qzr map of a gisaxs image (data) @@ -1201,8 +1348,8 @@ def plot_qzr_map(qr, qz, inc_x0, ticks=None, data=None, uid="uid", path="", vmin zticks_label: list, z-tick positions in unit of real space rticks: list, r-tick positions in unit of pixel rticks_label: list, r-tick positions in unit of real space - label_array_qr: qr label array with the same shpae as gisaxs image - label_array_qz: qz label array with the same shpae as gisaxs image + label_array_qr: qr label array with the same shape as gisaxs image + label_array_qz: qz label array with the same shape as gisaxs image inc_x0: the incident beam center x Options: @@ -1225,11 +1372,13 @@ def plot_qzr_map(qr, qz, inc_x0, ticks=None, data=None, uid="uid", path="", vmin import matplotlib.pyplot as plt if ticks is None: - zticks, zticks_label, rticks, rticks_label, label_array_qr, label_array_qz = get_qzr_map( - qr, qz, inc_x0, return_qrz_label=True + zticks, zticks_label, rticks, rticks_label, label_array_qr, label_array_qz = ( + get_qzr_map(qr, qz, inc_x0, return_qrz_label=True) ) else: - zticks, zticks_label, rticks, rticks_label, label_array_qr, label_array_qz = ticks + zticks, zticks_label, rticks, rticks_label, label_array_qr, label_array_qz = ( + ticks + ) cmap = "viridis" _cmap = copy.copy((mcm.get_cmap(cmap))) @@ -1239,7 +1388,9 @@ def plot_qzr_map(qr, qz, inc_x0, ticks=None, data=None, uid="uid", path="", vmin data = qr + qz 
im = ax.imshow(data, cmap="viridis", origin="lower") else: - im = ax.imshow(data, cmap="viridis", origin="lower", norm=LogNorm(vmin=vmin, vmax=vmax)) + im = ax.imshow( + data, cmap="viridis", origin="lower", norm=LogNorm(vmin=vmin, vmax=vmax) + ) imr = ax.imshow( label_array_qr, origin="lower", cmap="viridis", vmin=0.5, vmax=None @@ -1268,7 +1419,9 @@ def plot_qzr_map(qr, qz, inc_x0, ticks=None, data=None, uid="uid", path="", vmin fig.savefig(fp, dpi=fig.dpi) -def show_qzr_map(qr, qz, inc_x0, data=None, Nzline=10, Nrline=10, interp=True, *argv, **kwargs): +def show_qzr_map( + qr, qz, inc_x0, data=None, Nzline=10, Nrline=10, interp=True, *argv, **kwargs +): """ Dec 16, 2015, Y.G.@CHX plot a qzr map of a gisaxs image (data) @@ -1308,8 +1461,12 @@ def show_qzr_map(qr, qz, inc_x0, data=None, Nzline=10, Nrline=10, interp=True, * qr_start, qr_end, qr_num = qr.min(), qr.max(), Nrline qz_start, qz_end, qz_num = qz.min(), qz.max(), Nzline - qr_edge, qr_center = get_qedge(qr_start, qr_end, (qr_end - qr_start) / (qr_num + 100), qr_num) - qz_edge, qz_center = get_qedge(qz_start, qz_end, (qz_end - qz_start) / (qz_num + 100), qz_num) + qr_edge, qr_center = get_qedge( + qr_start, qr_end, (qr_end - qr_start) / (qr_num + 100), qr_num + ) + qz_edge, qz_center = get_qedge( + qz_start, qz_end, (qz_end - qz_start) / (qz_num + 100), qz_num + ) label_array_qz = get_qmap_label(qz, qz_edge) label_array_qr = get_qmap_label(qr, qr_edge) @@ -1325,7 +1482,9 @@ def show_qzr_map(qr, qz, inc_x0, data=None, Nzline=10, Nrline=10, interp=True, * data = qr + qz im = ax.imshow(data, cmap="viridis", origin="lower") else: - im = ax.imshow(data, cmap="viridis", origin="lower", norm=LogNorm(vmin=0.001, vmax=1e1)) + im = ax.imshow( + data, cmap="viridis", origin="lower", norm=LogNorm(vmin=0.001, vmax=1e1) + ) imr = ax.imshow( label_array_qr, origin="lower", cmap="viridis", vmin=0.5, vmax=None @@ -1334,7 +1493,7 @@ def show_qzr_map(qr, qz, inc_x0, data=None, Nzline=10, Nrline=10, interp=True, * 
label_array_qz, origin="lower", cmap="viridis", vmin=0.5, vmax=None ) # ,interpolation='nearest',) - # caxr = fig.add_axes([0.88, 0.2, 0.03, .7]) #x,y, width, heigth + # caxr = fig.add_axes([0.88, 0.2, 0.03, .7]) #x,y, width, height # cba = fig.colorbar(im, cax=caxr ) # cba = fig.colorbar(im, fraction=0.046, pad=0.04) @@ -1351,9 +1510,13 @@ def show_qzr_map(qr, qz, inc_x0, data=None, Nzline=10, Nrline=10, interp=True, * zticks, zticks_label = get_qz_tick_label(qz, label_array_qz) # rticks,rticks_label = get_qr_tick_label(label_array_qr,inc_x0) try: - rticks, rticks_label = zip(*np.sort(zip(*get_qr_tick_label(qr, label_array_qr, inc_x0, interp=interp)))) + rticks, rticks_label = zip( + *np.sort(zip(*get_qr_tick_label(qr, label_array_qr, inc_x0, interp=interp))) + ) except: - rticks, rticks_label = zip(*sorted(zip(*get_qr_tick_label(qr, label_array_qr, inc_x0, interp=interp)))) + rticks, rticks_label = zip( + *sorted(zip(*get_qr_tick_label(qr, label_array_qr, inc_x0, interp=interp))) + ) # stride = int(len(zticks)/10) stride = 1 @@ -1400,7 +1563,7 @@ def show_qzr_roi( save=False, return_fig=False, *argv, - **kwargs + **kwargs, ): """ Dec 16, 2015, Y.G.@CHX @@ -1648,7 +1811,9 @@ def plot_gisaxs_g2(g2, taus, res_pargs=None, one_plot=False, *argv, **kwargs): # plot g2 results -def plot_gisaxs_two_g2(g2, taus, g2b, tausb, res_pargs=None, one_plot=False, *argv, **kwargs): +def plot_gisaxs_two_g2( + g2, taus, g2b, tausb, res_pargs=None, one_plot=False, *argv, **kwargs +): """Dec 16, 2015, Y.G.@CHX plot g2 results, g2: one-time correlation function from a multi-tau method @@ -1804,7 +1969,9 @@ def plot_gisaxs_two_g2(g2, taus, g2b, tausb, res_pargs=None, one_plot=False, *ar # plt.show() -def save_gisaxs_g2(g2, res_pargs, time_label=False, taus=None, filename=None, *argv, **kwargs): +def save_gisaxs_g2( + g2, res_pargs, time_label=False, taus=None, filename=None, *argv, **kwargs +): """ Aug 8, 2016, Y.G.@CHX save g2 results, @@ -1843,14 +2010,23 @@ def save_gisaxs_g2(g2, 
res_pargs, time_label=False, taus=None, filename=None, *a if filename is None: if time_label: dt = datetime.now() - CurTime = "%s%02d%02d-%02d%02d-" % (dt.year, dt.month, dt.day, dt.hour, dt.minute) + CurTime = "%s%02d%02d-%02d%02d-" % ( + dt.year, + dt.month, + dt.day, + dt.hour, + dt.minute, + ) filename = os.path.join(path, "g2-%s-%s.csv" % (uid, CurTime)) else: filename = os.path.join(path, "uid=%s--g2.csv" % (uid)) else: filename = os.path.join(path, filename) df.to_csv(filename) - print("The correlation function of uid= %s is saved with filename as %s" % (uid, filename)) + print( + "The correlation function of uid= %s is saved with filename as %s" + % (uid, filename) + ) def stretched_auto_corr_scat_factor(x, beta, relaxation_rate, alpha=1.0, baseline=1): @@ -1861,7 +2037,9 @@ def simple_exponential(x, beta, relaxation_rate, baseline=1): return beta * np.exp(-2 * relaxation_rate * x) + baseline -def fit_gisaxs_g2(g2, res_pargs, function="simple_exponential", one_plot=False, *argv, **kwargs): +def fit_gisaxs_g2( + g2, res_pargs, function="simple_exponential", one_plot=False, *argv, **kwargs +): """ July 20,2016, Y.G.@CHX Fit one-time correlation function @@ -1886,12 +2064,12 @@ def fit_gisaxs_g2(g2, res_pargs, function="simple_exponential", one_plot=False, function: 'simple_exponential': fit by a simple exponential function, defined as beta * np.exp(-2 * relaxation_rate * lags) + baseline - 'streched_exponential': fit by a streched exponential function, defined as + 'stretched_exponential': fit by a stretched exponential function, defined as beta * (np.exp(-2 * relaxation_rate * lags))**alpha + baseline Returns ------- - fit resutls: + fit results: a dict, with keys as 'baseline': 'beta': @@ -1921,7 +2099,9 @@ def fit_gisaxs_g2(g2, res_pargs, function="simple_exponential", one_plot=False, if function == "simple_exponential" or function == "simple": _vars = np.unique(_vars + ["alpha"]) - mod = Model(stretched_auto_corr_scat_factor) # , independent_vars= list( 
_vars) ) + mod = Model( + stretched_auto_corr_scat_factor + ) # , independent_vars= list( _vars) ) elif function == "stretched_exponential" or function == "stretched": mod = Model(stretched_auto_corr_scat_factor) # , independent_vars= _vars) @@ -1966,9 +2146,16 @@ def fit_gisaxs_g2(g2, res_pargs, function="simple_exponential", one_plot=False, baseline_ = kwargs["guess_values"]["baseline"] else: baseline_ = 1.0 - pars = mod.make_params(beta=beta_, alpha=alpha_, relaxation_rate=relaxation_rate_, baseline=baseline_) + pars = mod.make_params( + beta=beta_, + alpha=alpha_, + relaxation_rate=relaxation_rate_, + baseline=baseline_, + ) else: - pars = mod.make_params(beta=0.05, alpha=1.0, relaxation_rate=0.005, baseline=1.0) + pars = mod.make_params( + beta=0.05, alpha=1.0, relaxation_rate=0.005, baseline=1.0 + ) for v in _vars: pars["%s" % v].vary = False @@ -2034,12 +2221,18 @@ def fit_gisaxs_g2(g2, res_pargs, function="simple_exponential", one_plot=False, ax.set_xlim(kwargs["xlim"]) txts = r"$\tau$" + r"$ = %.3f$" % (1 / rate[i]) + r"$ s$" - ax.text(x=0.02, y=0.55 + 0.3, s=txts, fontsize=14, transform=ax.transAxes) + ax.text( + x=0.02, y=0.55 + 0.3, s=txts, fontsize=14, transform=ax.transAxes + ) txts = r"$\alpha$" + r"$ = %.3f$" % (alpha[i]) # txts = r'$\beta$' + r'$ = %.3f$'%(beta[i]) + r'$ s^{-1}$' - ax.text(x=0.02, y=0.45 + 0.3, s=txts, fontsize=14, transform=ax.transAxes) + ax.text( + x=0.02, y=0.45 + 0.3, s=txts, fontsize=14, transform=ax.transAxes + ) txts = r"$baseline$" + r"$ = %.3f$" % (baseline[i]) - ax.text(x=0.02, y=0.35 + 0.3, s=txts, fontsize=14, transform=ax.transAxes) + ax.text( + x=0.02, y=0.35 + 0.3, s=txts, fontsize=14, transform=ax.transAxes + ) result = dict(beta=beta, rate=rate, alpha=alpha, baseline=baseline) fp = path + "uid=%s--g2-qz=%s--fit" % (uid, qz_center[qz_ind]) + ".png" @@ -2105,8 +2298,20 @@ def fit_gisaxs_g2(g2, res_pargs, function="simple_exponential", one_plot=False, # print( result1.best_values['relaxation_rate'], 
result1.best_values['beta'] ) - txts = r"$q_z$" + r"$_%s$" % qz_ind + r"$\tau$" + r"$ = %.3f$" % (1 / rate[i]) + r"$ s$" - ax.text(x=0.02, y=0.55 + 0.3 - 0.1 * qz_ind, s=txts, fontsize=14, transform=ax.transAxes) + txts = ( + r"$q_z$" + + r"$_%s$" % qz_ind + + r"$\tau$" + + r"$ = %.3f$" % (1 / rate[i]) + + r"$ s$" + ) + ax.text( + x=0.02, + y=0.55 + 0.3 - 0.1 * qz_ind, + s=txts, + fontsize=14, + transform=ax.transAxes, + ) if "ylim" in kwargs: ax.set_ylim(kwargs["ylim"]) @@ -2144,14 +2349,18 @@ def fit_gisaxs_g2(g2, res_pargs, function="simple_exponential", one_plot=False, ############################### -def get_each_box_mean_intensity(data_series, box_mask, sampling, timeperframe, plot_=True, *argv, **kwargs): +def get_each_box_mean_intensity( + data_series, box_mask, sampling, timeperframe, plot_=True, *argv, **kwargs +): """Dec 16, 2015, Y.G.@CHX get each box (ROI) mean intensity as a function of time """ - mean_int_sets, index_list = roi.mean_intensity(np.array(data_series[::sampling]), box_mask) + mean_int_sets, index_list = roi.mean_intensity( + np.array(data_series[::sampling]), box_mask + ) try: N = len(data_series) except: @@ -2166,7 +2375,13 @@ def get_each_box_mean_intensity(data_series, box_mask, sampling, timeperframe, p ax.set_title("uid= %s--Mean intensity of each box" % uid) for i in range(num_rings): - ax.plot(times[::sampling], mean_int_sets[:, i], label="Box " + str(i + 1), marker="o", ls="-") + ax.plot( + times[::sampling], + mean_int_sets[:, i], + label="Box " + str(i + 1), + marker="o", + ls="-", + ) ax.set_xlabel("Time") ax.set_ylabel("Mean Intensity") ax.legend() @@ -2250,14 +2465,22 @@ def fit_qr_qz_rate(qr, qz, rate, plot_=True, *argv, **kwargs): ax.plot(x**power, res[i].best_fit, "-r") txts = r"$D0: %.3e$" % D0[i] + r" $A^2$" + r"$s^{-1}$" dy = 0.1 - ax.text(x=0.15, y=0.65 - dy * i, s=txts, fontsize=14, transform=ax.transAxes) + ax.text( + x=0.15, y=0.65 - dy * i, s=txts, fontsize=14, transform=ax.transAxes + ) legend = 
ax.legend(loc="best") ax.set_ylabel("Relaxation rate " r"$\gamma$" "($s^{-1}$)") ax.set_xlabel("$q^%s$" r"($\AA^{-2}$)" % power) dt = datetime.now() - CurTime = "%s%02d%02d-%02d%02d-" % (dt.year, dt.month, dt.day, dt.hour, dt.minute) + CurTime = "%s%02d%02d-%02d%02d-" % ( + dt.year, + dt.month, + dt.day, + dt.hour, + dt.minute, + ) # fp = path + 'Q%s-Rate--uid=%s'%(power,uid) + CurTime + '--Fit.png' fp = path + "uid=%s--Q-Rate" % (uid) + "--fit-.png" fig.savefig(fp, dpi=fig.dpi) @@ -2503,7 +2726,9 @@ def multi_uids_gisaxs_xpcs_analysis( md["sample"] = "sample" dpix = md["x_pixel_size"] * 1000.0 # in mm, eiger 4m is 0.075 mm - lambda_ = md["incident_wavelength"] # wavelegth of the X-rays in Angstroms + lambda_ = md[ + "incident_wavelength" + ] # wavelegth of the X-rays in Angstroms Ldet = md["detector_distance"] # detector to sample distance (mm), currently, *1000 for saxs, *1 for gisaxs exposuretime = md["count_time"] @@ -2512,7 +2737,12 @@ def multi_uids_gisaxs_xpcs_analysis( # timeperframe = exposuretime#for visiblitly # timeperframe = 2 ## manual overwrite!!!! we apparently writing the wrong metadata.... 
setup_pargs = dict( - uid=uid, dpix=dpix, Ldet=Ldet, lambda_=lambda_, timeperframe=timeperframe, path=data_dir + uid=uid, + dpix=dpix, + Ldet=Ldet, + lambda_=lambda_, + timeperframe=timeperframe, + path=data_dir, ) md["avg_img"] = avg_imgr @@ -2527,17 +2757,34 @@ def multi_uids_gisaxs_xpcs_analysis( else: good_end_ = good_end FD = Multifile(filename, good_start, good_end_) - good_start = max(good_start, np.where(np.array(imgsum) > min_inten)[0][0]) - print("With compression, the good_start frame number is: %s " % good_start) + good_start = max( + good_start, np.where(np.array(imgsum) > min_inten)[0][0] + ) + print( + "With compression, the good_start frame number is: %s " + % good_start + ) print("The good_end frame number is: %s " % good_end_) if not para_run: g2, lag_steps_ = cal_g2c( - FD, box_maskr, bad_frame_list, good_start, num_buf=8, imgsum=None, norm=None + FD, + box_maskr, + bad_frame_list, + good_start, + num_buf=8, + imgsum=None, + norm=None, ) else: g2, lag_steps_ = cal_g2p( - FD, box_maskr, bad_frame_list, good_start, num_buf=8, imgsum=None, norm=None + FD, + box_maskr, + bad_frame_list, + good_start, + num_buf=8, + imgsum=None, + norm=None, ) if len(lag_steps) < len(lag_steps_): @@ -2550,7 +2797,11 @@ def multi_uids_gisaxs_xpcs_analysis( good_start = 0 good_series = apply_mask(imgsar[good_start:], maskr) imgsum, bad_frame_list = get_each_frame_intensity( - good_series, sampling=sampling, bad_pixel_threshold=1.2e8, plot_=False, uid=uid + good_series, + sampling=sampling, + bad_pixel_threshold=1.2e8, + plot_=False, + uid=uid, ) bad_image_process = False @@ -2559,14 +2810,25 @@ def multi_uids_gisaxs_xpcs_analysis( print(bad_image_process) g2, lag_steps_ = cal_g2( - good_series, box_maskr, bad_image_process, bad_frame_list, good_start, num_buf=8 + good_series, + box_maskr, + bad_image_process, + bad_frame_list, + good_start, + num_buf=8, ) if len(lag_steps) < len(lag_steps_): lag_steps = lag_step_ taus_ = lag_steps_ * timeperframe taus = lag_steps * 
timeperframe - res_pargs = dict(taus=taus_, qz_center=qz_center, qr_center=qr_center, path=data_dir_, uid=uid) + res_pargs = dict( + taus=taus_, + qz_center=qz_center, + qr_center=qr_center, + path=data_dir_, + uid=uid, + ) save_gisaxs_g2(g2, res_pargs) # plot_gisaxs_g2( g2, taus, vlim=[0.95, 1.1], res_pargs=res_pargs, one_plot=True) @@ -2576,14 +2838,33 @@ def multi_uids_gisaxs_xpcs_analysis( res_pargs, function="stretched", vlim=[0.95, 1.1], - fit_variables={"baseline": True, "beta": True, "alpha": False, "relaxation_rate": True}, - guess_values={"baseline": 1.229, "beta": 0.05, "alpha": 1.0, "relaxation_rate": 0.01}, + fit_variables={ + "baseline": True, + "beta": True, + "alpha": False, + "relaxation_rate": True, + }, + guess_values={ + "baseline": 1.229, + "beta": 0.05, + "alpha": 1.0, + "relaxation_rate": 0.01, + }, one_plot=True, ) - fit_qr_qz_rate(qr_center, qz_center, fit_result, power_variable=False, uid=uid, path=data_dir_) + fit_qr_qz_rate( + qr_center, + qz_center, + fit_result, + power_variable=False, + uid=uid, + path=data_dir_, + ) - psave_obj(md, data_dir_ + "uid=%s-md" % uid) # save the setup parameters + psave_obj( + md, data_dir_ + "uid=%s-md" % uid + ) # save the setup parameters g2s[run_seq + 1][i] = g2 diff --git a/pyCHX/XPCS_SAXS.py b/pyCHX/XPCS_SAXS.py index e910c8c..2936781 100644 --- a/pyCHX/XPCS_SAXS.py +++ b/pyCHX/XPCS_SAXS.py @@ -7,21 +7,16 @@ import os from pandas import DataFrame -from scipy.special import erf from pyCHX.chx_compress_analysis import ( Multifile, compress_eigerdata, get_avg_imgc, - get_each_ring_mean_intensityc, - init_compress_eigerdata, - mean_intensityc, - read_compressed_eigerdata, ) -from pyCHX.chx_correlationc import Get_Pixel_Arrayc, auto_two_Arrayc, cal_g2c, get_pixelist_interp_iq +from pyCHX.chx_correlationc import cal_g2c, get_pixelist_interp_iq from pyCHX.chx_correlationp import cal_g2p from pyCHX.chx_generic_functions import * -from pyCHX.chx_libs import RUN_GUI, Figure, colors, colors_, colors_copy, 
markers, markers_, markers_copy +from pyCHX.chx_libs import RUN_GUI, Figure, colors, markers def get_iq_invariant(qt, iqst): @@ -111,9 +106,9 @@ def recover_img_from_iq(qp, iq, center, mask): return img_ -def get_cirucular_average_std(img, mask, setup_pargs, img_name="xx"): +def get_circular_average_std(img, mask, setup_pargs, img_name="xx"): """YG. develop at CHX, 2017 July 18, - Get the standard devation of tge circular average of img + Get the standard deviation of the circular average of img image-->I(q)-->image_mean--> (image- image_mean)**2 --> I(q) --> std = sqrt(I(q)) """ qp, iq, q = get_circular_average(img, mask, pargs=setup_pargs, save=False) @@ -132,7 +127,15 @@ def get_delta_img(img, mask, setup_pargs, img_name="xx", plot=False): img_ = recover_img_from_iq(qp, iq, center, mask) delta = img - img_ * img.mean() / img_.mean() if plot: - show_img(delta, logs=True, aspect=1, cmap=cmap_albula, vmin=1e-5, vmax=10**1, image_name=img_name) + show_img( + delta, + logs=True, + aspect=1, + cmap=cmap_albula, + vmin=1e-5, + vmax=10**1, + image_name=img_name, + ) return delta @@ -167,7 +170,9 @@ def combine_ring_anglar_mask(ring_mask, ang_mask): return np.int_(ring_ang_) -def get_seg_from_ring_mask(inner_angle, outer_angle, num_angles, width_angle, center, ring_mask, qr_center): +def get_seg_from_ring_mask( + inner_angle, outer_angle, num_angles, width_angle, center, ring_mask, qr_center +): """YG. Jan 6, 2017 A simple wrap function to get angle cut mask from ring_mask Parameter: @@ -194,7 +199,9 @@ def get_seg_from_ring_mask(inner_angle, outer_angle, num_angles, width_angle, ce return seg_mask, qval_dict -def get_seg_dict_from_ring_mask(inner_angle, outer_angle, num_angles, width_angle, center, ring_mask, qr_center): +def get_seg_dict_from_ring_mask( + inner_angle, outer_angle, num_angles, width_angle, center, ring_mask, qr_center +): """YG. 
Jan 6, 2017 A simple wrap function to get angle cut mask from ring_mask Parameter: @@ -247,7 +254,11 @@ def combine_two_roi_mask(ring_mask, ang_mask, pixel_num_thres=10): for i, ind in enumerate(ruiq[1:]): ring_mask_.ravel()[np.where(rf == ind)[0]] = maxa * i - new_mask = (ring_mask_ + ang_mask) * np.array(ring_mask, dtype=bool) * np.array(ang_mask, dtype=bool) + new_mask = ( + (ring_mask_ + ang_mask) + * np.array(ring_mask, dtype=bool) + * np.array(ang_mask, dtype=bool) + ) qind, pixelist = roi.extract_label_indices(new_mask) noqs = len(np.unique(qind)) @@ -333,7 +344,14 @@ def bin_1D(x, y, nx=None, min_x=None, max_x=None): def circular_average( - image, calibrated_center, threshold=0, nx=None, pixel_size=(1, 1), min_x=None, max_x=None, mask=None + image, + calibrated_center, + threshold=0, + nx=None, + pixel_size=(1, 1), + min_x=None, + max_x=None, + mask=None, ): """Circular average of the the image data The circular average is also known as the radial integration @@ -417,7 +435,7 @@ def get_circular_average( plot_=False, save=False, *argv, - **kwargs + **kwargs, ): """get a circular average of an image Parameters @@ -435,8 +453,8 @@ def get_circular_average( number of bins in x defaults is 1500 bins - plot_: a boolen type, if True, plot the one-D curve - plot_qinpixel:a boolen type, if True, the x-axis of the one-D curve is q in pixel; else in real Q + plot_: a boolean type, if True, plot the one-D curve + plot_qinpixel:a boolean type, if True, the x-axis of the one-D curve is q in pixel; else in real Q Returns ------- @@ -447,10 +465,22 @@ def get_circular_average( """ - center, Ldet, lambda_, dpix = pargs["center"], pargs["Ldet"], pargs["lambda_"], pargs["dpix"] + center, Ldet, lambda_, dpix = ( + pargs["center"], + pargs["Ldet"], + pargs["lambda_"], + pargs["dpix"], + ) uid = pargs["uid"] qp, iq = circular_average( - avg_img, center, threshold=0, nx=nx, pixel_size=(dpix, dpix), mask=mask, min_x=min_x, max_x=max_x + avg_img, + center, + threshold=0, + nx=nx, 
+ pixel_size=(dpix, dpix), + mask=mask, + min_x=min_x, + max_x=max_x, ) qp_ = qp * dpix # convert bin_centers from r [um] to two_theta and then to q [1/px] (reciprocal space) @@ -492,12 +522,23 @@ def get_circular_average( fig.savefig(fp, dpi=fig.dpi) if save: path = pargs["path"] - save_lists([q, iq], label=["q_A-1", "Iq"], filename="%s_q_Iq.csv" % uid, path=path) + save_lists( + [q, iq], label=["q_A-1", "Iq"], filename="%s_q_Iq.csv" % uid, path=path + ) return qp, iq, q def plot_circular_average( - qp, iq, q, pargs, show_pixel=False, loglog=False, save=True, return_fig=False, *argv, **kwargs + qp, + iq, + q, + pargs, + show_pixel=False, + loglog=False, + save=True, + return_fig=False, + *argv, + **kwargs, ): if RUN_GUI: fig = Figure() @@ -546,7 +587,18 @@ def plot_circular_average( return fig -def get_angular_average(avg_img, mask, pargs, min_r, max_r, nx=3600, plot_=False, save=False, *argv, **kwargs): +def get_angular_average( + avg_img, + mask, + pargs, + min_r, + max_r, + nx=3600, + plot_=False, + save=False, + *argv, + **kwargs, +): """get a angular average of an image Parameters ---------- @@ -563,8 +615,8 @@ def get_angular_average(avg_img, mask, pargs, min_r, max_r, nx=3600, plot_=False number of bins in x defaults is 1500 bins - plot_: a boolen type, if True, plot the one-D curve - plot_qinpixel:a boolen type, if True, the x-axis of the one-D curve is q in pixel; else in real Q + plot_: a boolean type, if True, plot the one-D curve + plot_qinpixel:a boolean type, if True, the x-axis of the one-D curve is q in pixel; else in real Q Returns ------- @@ -575,11 +627,22 @@ def get_angular_average(avg_img, mask, pargs, min_r, max_r, nx=3600, plot_=False """ - center, Ldet, lambda_, dpix = pargs["center"], pargs["Ldet"], pargs["lambda_"], pargs["dpix"] + center, Ldet, lambda_, dpix = ( + pargs["center"], + pargs["Ldet"], + pargs["lambda_"], + pargs["dpix"], + ) uid = pargs["uid"] angq, ang = angular_average( - avg_img, calibrated_center=center, 
pixel_size=(dpix, dpix), nx=nx, min_r=min_r, max_r=max_r, mask=mask + avg_img, + calibrated_center=center, + pixel_size=(dpix, dpix), + nx=nx, + min_r=min_r, + max_r=max_r, + mask=mask, ) if plot_: @@ -664,7 +727,8 @@ def angular_average( min_r = 0 if max_r is None: max_r = np.sqrt( - (image.shape[0] - calibrated_center[0]) ** 2 + (image.shape[1] - calibrated_center[1]) ** 2 + (image.shape[0] - calibrated_center[0]) ** 2 + + (image.shape[1] - calibrated_center[1]) ** 2 ) r_mask = make_ring_mask(calibrated_center, image.shape, min_r, max_r) @@ -679,7 +743,9 @@ def angular_average( bina = np.ravel(angle_val) image_mask = np.ravel(image * r_mask) - bin_edges, sums, counts = utils.bin_1D(bina, image_mask, nx, min_x=min_x, max_x=max_x) + bin_edges, sums, counts = utils.bin_1D( + bina, image_mask, nx, min_x=min_x, max_x=max_x + ) # print (counts) th_mask = counts > threshold @@ -690,7 +756,18 @@ def angular_average( return bin_centers * 180 / np.pi, ang_averages -def get_t_iqc(FD, frame_edge, mask, pargs, nx=1500, plot_=False, save=False, show_progress=True, *argv, **kwargs): +def get_t_iqc( + FD, + frame_edge, + mask, + pargs, + nx=1500, + plot_=False, + save=False, + show_progress=True, + *argv, + **kwargs, +): """Get t-dependent Iq Parameters @@ -702,7 +779,7 @@ def get_t_iqc(FD, frame_edge, mask, pargs, nx=1500, plot_=False, save=False, sho nx : int, optional number of bins in x defaults is 1500 bins - plot_: a boolen type, if True, plot the time~one-D curve with qp as x-axis + plot_: a boolean type, if True, plot the time~one-D curve with qp as x-axis Returns --------- qp: q in pixel @@ -716,7 +793,9 @@ def get_t_iqc(FD, frame_edge, mask, pargs, nx=1500, plot_=False, save=False, sho for i in range(Nt): t1, t2 = frame_edge[i] # print (t1,t2) - avg_img = get_avg_imgc(FD, beg=t1, end=t2, sampling=1, plot_=False, show_progress=show_progress) + avg_img = get_avg_imgc( + FD, beg=t1, end=t2, sampling=1, plot_=False, show_progress=show_progress + ) qp, iqs[i], q = 
get_circular_average(avg_img, mask, pargs, nx=nx, plot_=False) if plot_: @@ -762,7 +841,16 @@ def get_t_iqc(FD, frame_edge, mask, pargs, nx=1500, plot_=False, save=False, sho def get_t_iqc_imstack( - imgs, frame_edge, mask, pargs, nx=1500, plot_=False, save=False, show_progress=True, *argv, **kwargs + imgs, + frame_edge, + mask, + pargs, + nx=1500, + plot_=False, + save=False, + show_progress=True, + *argv, + **kwargs, ): """ Get t-dependent Iq @@ -779,7 +867,7 @@ def get_t_iqc_imstack( nx : int, optional number of bins in x defaults is 1500 bins - plot_: a boolen type, if True, plot the time~one-D curve with qp as x-axis + plot_: a boolean type, if True, plot the time~one-D curve with qp as x-axis Returns --------- qp: q in pixel @@ -838,7 +926,17 @@ def get_t_iqc_imstack( return qp, np.array(iqs), q -def plot_t_iqc(q, iqs, frame_edge, pargs, save=True, return_fig=False, legend_size=None, *argv, **kwargs): +def plot_t_iqc( + q, + iqs, + frame_edge, + pargs, + save=True, + return_fig=False, + legend_size=None, + *argv, + **kwargs, +): """Plot t-dependent Iq Parameters @@ -916,7 +1014,17 @@ def calc_q(L, a, wv): return q -def get_t_iq(data_series, frame_edge, mask, pargs, nx=1500, plot_=False, save=False, *argv, **kwargs): +def get_t_iq( + data_series, + frame_edge, + mask, + pargs, + nx=1500, + plot_=False, + save=False, + *argv, + **kwargs, +): """Get t-dependent Iq Parameters @@ -928,7 +1036,7 @@ def get_t_iq(data_series, frame_edge, mask, pargs, nx=1500, plot_=False, save=Fa nx : int, optional number of bins in x defaults is 1500 bins - plot_: a boolen type, if True, plot the time~one-D curve with qp as x-axis + plot_: a boolean type, if True, plot the time~one-D curve with qp as x-axis Returns --------- @@ -992,7 +1100,7 @@ def get_t_ang( plot_=False, save=False, *argv, - **kwargs + **kwargs, ): """Get t-dependent angule intensity @@ -1017,7 +1125,7 @@ def get_t_ang( nx : int, optional number of bins in x defaults is 1500 bins - plot_: a boolen type, if True, 
plot the time~one-D curve with qp as x-axis + plot_: a boolean type, if True, plot the time~one-D curve with qp as x-axis Returns --------- @@ -1034,7 +1142,13 @@ def get_t_ang( # print (t1,t2) avg_img = get_avg_img(data_series[t1:t2], sampling=1, plot_=False) qp, iqs[i] = angular_average( - avg_img, center, pixel_size=pixel_size, nx=nx, min_r=min_r, max_r=max_r, mask=mask + avg_img, + center, + pixel_size=pixel_size, + nx=nx, + min_r=min_r, + max_r=max_r, + mask=mask, ) if plot_: @@ -1124,7 +1238,7 @@ def _make_roi(coords, edges, shape): def angulars(edges, center, shape): """ - Draw annual (angluar-shaped) shaped regions of interest. + Draw annual (angular-shaped) shaped regions of interest. Each ring will be labeled with an integer. Regions outside any ring will be filled with zeros. Parameters @@ -1148,7 +1262,8 @@ def angulars(edges, center, shape): edges = np.atleast_2d(np.asarray(edges)).ravel() if not 0 == len(edges) % 2: raise ValueError( - "edges should have an even number of elements, " "giving inner, outer radii for each angular" + "edges should have an even number of elements, " + "giving inner, outer radii for each angular" ) if not np.all(np.diff(edges) > 0): raise ValueError( @@ -1181,9 +1296,9 @@ def update_angular_mask_width_edge(edge, mask, center, roi_mask): return roi_mask -def fix_angle_mask_at_PN_180(edge, mask, center, roi_mask): +def fix_angle_mask_at_ON_180(edge, mask, center, roi_mask): """YG Dev@CHX May, 2019 - to fix the problem of making angluar mask at the angle edge around +/- 180 + to fix the problem of making angular mask at the angle edge around +/- 180 Input: edge: the edge of the anglues mask: the mask of the image @@ -1269,7 +1384,9 @@ def get_angular_mask( if edges is None: if num_angles != 1: - spacing = (outer_angle - inner_angle - num_angles * width) / (num_angles - 1) # spacing between rings + spacing = (outer_angle - inner_angle - num_angles * width) / ( + num_angles - 1 + ) # spacing between rings else: spacing = 0 edges 
= roi.ring_edges(inner_angle, width, spacing, num_angles) @@ -1283,18 +1400,18 @@ def get_angular_mask( edges2 = edges - 180 for edge_ in [edges2]: ang_mask = update_angular_mask_width_edge(edge_, mask, center, ang_mask) - ang_mask = fix_angle_mask_at_PN_180(edge_, mask, center, ang_mask) + ang_mask = fix_angle_mask_at_ON_180(edge_, mask, center, ang_mask) if flow_angle is not None: edges3 = 2 * flow_angle - edges[:, ::-1] edges4 = 2 * flow_angle - edges[:, ::-1] - 180 for edge_ in [edges3, edges4]: ang_mask = update_angular_mask_width_edge(edge_, mask, center, ang_mask) - ang_mask = fix_angle_mask_at_PN_180(edge_, mask, center, ang_mask) + ang_mask = fix_angle_mask_at_ON_180(edge_, mask, center, ang_mask) else: # for i, edge_ in enumerate( edges ): # print(edge_) if fix_180_angle: - ang_mask = fix_angle_mask_at_PN_180(edges, mask, center, ang_mask) + ang_mask = fix_angle_mask_at_ON_180(edges, mask, center, ang_mask) labels, indices = roi.extract_label_indices(ang_mask) nopr = np.bincount(np.array(labels, dtype=int))[1:] if len(np.where(nopr == 0)[0] != 0): @@ -1343,7 +1460,9 @@ def get_angular_mask_old( if edges is None: if num_angles != 1: - spacing = (outer_angle - inner_angle - num_angles * width) / (num_angles - 1) # spacing between rings + spacing = (outer_angle - inner_angle - num_angles * width) / ( + num_angles - 1 + ) # spacing between rings else: spacing = 0 edges = roi.ring_edges(inner_angle, width, spacing, num_angles) @@ -1437,7 +1556,12 @@ def get_ring_mask( """ - center, Ldet, lambda_, dpix = pargs["center"], pargs["Ldet"], pargs["lambda_"], pargs["dpix"] + center, Ldet, lambda_, dpix = ( + pargs["center"], + pargs["Ldet"], + pargs["lambda_"], + pargs["dpix"], + ) # spacing = (outer_radius - inner_radius)/(num_rings-1) - 2 # spacing between rings # qc = np.int_( np.linspace( inner_radius,outer_radius, num_rings ) ) @@ -1450,7 +1574,9 @@ def get_ring_mask( # find the edges of the required rings if edges is None: if num_rings != 1: - spacing = 
(outer_radius - inner_radius - num_rings * width) / (num_rings - 1) # spacing between rings + spacing = (outer_radius - inner_radius - num_rings * width) / ( + num_rings - 1 + ) # spacing between rings else: spacing = 0 edges = roi.ring_edges(inner_radius, width, spacing, num_rings) @@ -1601,7 +1727,15 @@ def show_ring_ang_roi(data, rois, alpha=0.3, save=False, *argv, **kwargs): def plot_qIq_with_ROI( - q, iq, q_ring_center, q_ring_edge=None, logs=True, save=False, return_fig=False, *argv, **kwargs + q, + iq, + q_ring_center, + q_ring_edge=None, + logs=True, + save=False, + return_fig=False, + *argv, + **kwargs, ): """Aug 6, 2016, Y.G.@CHX Update@2019, March to make a span plot with q_ring_edge @@ -1655,12 +1789,21 @@ def plot_qIq_with_ROI( def get_each_ring_mean_intensity( - data_series, ring_mask, sampling, timeperframe, plot_=True, save=False, *argv, **kwargs + data_series, + ring_mask, + sampling, + timeperframe, + plot_=True, + save=False, + *argv, + **kwargs, ): """ get time dependent mean intensity of each ring """ - mean_int_sets, index_list = roi.mean_intensity(np.array(data_series[::sampling]), ring_mask) + mean_int_sets, index_list = roi.mean_intensity( + np.array(data_series[::sampling]), ring_mask + ) times = np.arange(len(data_series)) * timeperframe # get the time for each frame num_rings = len(np.unique(ring_mask)[1:]) @@ -1691,7 +1834,9 @@ def get_each_ring_mean_intensity( # plot g2 results -def plot_saxs_rad_ang_g2(g2, taus, res_pargs=None, master_angle_plot=False, return_fig=False, *argv, **kwargs): +def plot_saxs_rad_ang_g2( + g2, taus, res_pargs=None, master_angle_plot=False, return_fig=False, *argv, **kwargs +): """plot g2 results of segments with radius and angle partation , g2: one-time correlation function @@ -1813,7 +1958,13 @@ def plot_saxs_rad_ang_g2(g2, taus, res_pargs=None, master_angle_plot=False, retu def fit_saxs_rad_ang_g2( - g2, res_pargs=None, function="simple_exponential", fit_range=None, master_angle_plot=False, *argv, **kwargs 
+ g2, + res_pargs=None, + function="simple_exponential", + fit_range=None, + master_angle_plot=False, + *argv, + **kwargs, ): """ Fit one-time correlation function @@ -1830,7 +1981,7 @@ def fit_saxs_rad_ang_g2( function: 'simple_exponential': fit by a simple exponential function, defined as beta * np.exp(-2 * relaxation_rate * lags) + baseline - 'streched_exponential': fit by a streched exponential function, defined as + 'stretched_exponential': fit by a stretched exponential function, defined as beta * (np.exp(-2 * relaxation_rate * lags))**alpha + baseline #fit_vibration: @@ -1838,7 +1989,7 @@ def fit_saxs_rad_ang_g2( Returns ------- - fit resutls: + fit results: a dict, with keys as 'baseline': 'beta': @@ -1902,13 +2053,17 @@ def fit_saxs_rad_ang_g2( if function == "simple_exponential" or function == "simple": _vars = np.unique(_vars + ["alpha"]) - mod = Model(stretched_auto_corr_scat_factor) # , independent_vars= list( _vars) ) + mod = Model( + stretched_auto_corr_scat_factor + ) # , independent_vars= list( _vars) ) elif function == "stretched_exponential" or function == "stretched": mod = Model(stretched_auto_corr_scat_factor) # , independent_vars= _vars) elif function == "stretched_vibration": - mod = Model(stretched_auto_corr_scat_factor_with_vibration) # , independent_vars= _vars) + mod = Model( + stretched_auto_corr_scat_factor_with_vibration + ) # , independent_vars= _vars) elif function == "flow_para_function" or function == "flow_para": mod = Model(flow_para_function) # , independent_vars= _vars) @@ -1933,7 +2088,9 @@ def fit_saxs_rad_ang_g2( _alpha = _guess_val["alpha"] _relaxation_rate = _guess_val["relaxation_rate"] _baseline = _guess_val["baseline"] - pars = mod.make_params(beta=_beta, alpha=_alpha, relaxation_rate=_relaxation_rate, baseline=_baseline) + pars = mod.make_params( + beta=_beta, alpha=_alpha, relaxation_rate=_relaxation_rate, baseline=_baseline + ) if function == "flow_para_function" or function == "flow_para": _flow_velocity = 
_guess_val["flow_velocity"] @@ -1949,7 +2106,12 @@ def fit_saxs_rad_ang_g2( _freq = _guess_val["freq"] _amp = _guess_val["amp"] pars = mod.make_params( - beta=_beta, alpha=_alpha, freq=_freq, amp=_amp, relaxation_rate=_relaxation_rate, baseline=_baseline + beta=_beta, + alpha=_alpha, + freq=_freq, + amp=_amp, + relaxation_rate=_relaxation_rate, + baseline=_baseline, ) for v in _vars: @@ -2044,7 +2206,9 @@ def fit_saxs_rad_ang_g2( if function == "flow_para_function" or function == "flow_para": txts = r"$flow_v$" + r"$ = %.3f$" % (flow[i]) - ax.text(x=x, y=y0 - 0.3, s=txts, fontsize=fontsize, transform=ax.transAxes) + ax.text( + x=x, y=y0 - 0.3, s=txts, fontsize=fontsize, transform=ax.transAxes + ) if "ylim" in kwargs: ax.set_ylim(kwargs["ylim"]) @@ -2063,7 +2227,9 @@ def fit_saxs_rad_ang_g2( result = dict(beta=beta, rate=rate, alpha=alpha, baseline=baseline) if function == "flow_para_function" or function == "flow_para": - result = dict(beta=beta, rate=rate, alpha=alpha, baseline=baseline, flow_velocity=flow) + result = dict( + beta=beta, rate=rate, alpha=alpha, baseline=baseline, flow_velocity=flow + ) if function == "stretched_vibration": result = dict(beta=beta, rate=rate, alpha=alpha, baseline=baseline, freq=freq) @@ -2099,7 +2265,13 @@ def save_seg_saxs_g2(g2, res_pargs, time_label=True, *argv, **kwargs): if time_label: dt = datetime.now() - CurTime = "%s%02d%02d-%02d%02d-" % (dt.year, dt.month, dt.day, dt.hour, dt.minute) + CurTime = "%s%02d%02d-%02d%02d-" % ( + dt.year, + dt.month, + dt.day, + dt.hour, + dt.minute, + ) filename = os.path.join(path, "g2-%s-%s.csv" % (uid, CurTime)) else: filename = os.path.join(path, "uid=%s--g2.csv" % (uid)) @@ -2248,8 +2420,12 @@ def multi_uids_saxs_flow_xpcs_analysis( md["sample"] = "sample" dpix = md["x_pixel_size"] * 1000.0 # in mm, eiger 4m is 0.075 mm - lambda_ = md["incident_wavelength"] # wavelegth of the X-rays in Angstroms - Ldet = md["detector_distance"] * 1000 # detector to sample distance (mm) + lambda_ = md[ + 
"incident_wavelength" + ] # wavelegth of the X-rays in Angstroms + Ldet = ( + md["detector_distance"] * 1000 + ) # detector to sample distance (mm) exposuretime = md["count_time"] acquisition_period = md["frame_time"] timeperframe = acquisition_period # for g2 @@ -2281,8 +2457,12 @@ def multi_uids_saxs_flow_xpcs_analysis( good_end_ = good_end FD = Multifile(filename, good_start, good_end_) - good_start = max(good_start, np.where(np.array(imgsum) > min_inten)[0][0]) - print("With compression, the good_start frame number is: %s " % good_start) + good_start = max( + good_start, np.where(np.array(imgsum) > min_inten)[0][0] + ) + print( + "With compression, the good_start frame number is: %s " % good_start + ) print("The good_end frame number is: %s " % good_end_) norm = None @@ -2308,7 +2488,13 @@ def multi_uids_saxs_flow_xpcs_analysis( ) else: g2, lag_stepsv = cal_g2p( - FD, seg_mask, bad_frame_list, good_start, num_buf=8, imgsum=None, norm=norm + FD, + seg_mask, + bad_frame_list, + good_start, + num_buf=8, + imgsum=None, + norm=norm, ) if len(lag_steps) < len(lag_stepsv): @@ -2321,7 +2507,14 @@ def multi_uids_saxs_flow_xpcs_analysis( path=data_dir_, uid=uid + "_1a_mq%s" % conf, ) - save_g2(g2, taus=taus, qr=rcen, qz=acen, uid=uid + "_1a_mq%s" % conf, path=data_dir_) + save_g2( + g2, + taus=taus, + qr=rcen, + qz=acen, + uid=uid + "_1a_mq%s" % conf, + path=data_dir_, + ) if nconf == 0: g2s[run_seq + 1][i]["v"] = g2 # perpendular @@ -2430,7 +2623,9 @@ def multi_uids_saxs_flow_xpcs_analysis( ) dfv = save_g2_fit_para_tocsv( - g2_fit_result, filename=uid + "_1a_mq" + conf + "_fit_para", path=data_dir_ + g2_fit_result, + filename=uid + "_1a_mq" + conf + "_fit_para", + path=data_dir_, ) fit_q_rate( @@ -2442,7 +2637,9 @@ def multi_uids_saxs_flow_xpcs_analysis( ) # psave_obj( fit_result, data_dir_ + 'uid=%s-g2-fit-para'%uid ) - psave_obj(md, data_dir_ + "uid=%s-md" % uid) # save the setup parameters + psave_obj( + md, data_dir_ + "uid=%s-md" % uid + ) # save the setup 
parameters FD = 0 avg_img, imgsum, bad_frame_list = [0, 0, 0] @@ -2567,8 +2764,12 @@ def multi_uids_saxs_xpcs_analysis( md["sample"] = "sample" dpix = md["x_pixel_size"] * 1000.0 # in mm, eiger 4m is 0.075 mm - lambda_ = md["incident_wavelength"] # wavelegth of the X-rays in Angstroms - Ldet = md["detector_distance"] * 1000 # detector to sample distance (mm) + lambda_ = md[ + "incident_wavelength" + ] # wavelegth of the X-rays in Angstroms + Ldet = ( + md["detector_distance"] * 1000 + ) # detector to sample distance (mm) exposuretime = md["count_time"] acquisition_period = md["frame_time"] timeperframe = acquisition_period # for g2 @@ -2600,8 +2801,13 @@ def multi_uids_saxs_xpcs_analysis( good_end_ = good_end FD = Multifile(filename, good_start, good_end_) - good_start = max(good_start, np.where(np.array(imgsum) > min_inten)[0][0]) - print("With compression, the good_start frame number is: %s " % good_start) + good_start = max( + good_start, np.where(np.array(imgsum) > min_inten)[0][0] + ) + print( + "With compression, the good_start frame number is: %s " + % good_start + ) print("The good_end frame number is: %s " % good_end_) hmask = create_hot_pixel_mask(avg_img, 1e8) @@ -2619,11 +2825,23 @@ def multi_uids_saxs_xpcs_analysis( norm = get_pixelist_interp_iq(qp, iq, ring_mask, center) if not para_run: g2, lag_steps_ = cal_g2c( - FD, ring_mask, bad_frame_list, good_start, num_buf=8, imgsum=None, norm=norm + FD, + ring_mask, + bad_frame_list, + good_start, + num_buf=8, + imgsum=None, + norm=norm, ) else: g2, lag_steps_ = cal_g2p( - FD, ring_mask, bad_frame_list, good_start, num_buf=8, imgsum=None, norm=norm + FD, + ring_mask, + bad_frame_list, + good_start, + num_buf=8, + imgsum=None, + norm=norm, ) if len(lag_steps) < len(lag_steps_): @@ -2643,7 +2861,11 @@ def multi_uids_saxs_xpcs_analysis( good_series = apply_mask(imgsa[good_start:], mask) imgsum, bad_frame_list = get_each_frame_intensity( - good_series, sampling=sampling, bad_pixel_threshold=1.2e8, plot_=False, 
uid=uid + good_series, + sampling=sampling, + bad_pixel_threshold=1.2e8, + plot_=False, + uid=uid, ) bad_image_process = False @@ -2652,7 +2874,12 @@ def multi_uids_saxs_xpcs_analysis( print(bad_image_process) g2, lag_steps_ = cal_g2( - good_series, ring_mask, bad_image_process, bad_frame_list, good_start, num_buf=8 + good_series, + ring_mask, + bad_image_process, + bad_frame_list, + good_start, + num_buf=8, ) if len(lag_steps) < len(lag_steps_): lag_steps = lag_step_ @@ -2660,7 +2887,9 @@ def multi_uids_saxs_xpcs_analysis( taus_ = lag_steps_ * timeperframe taus = lag_steps * timeperframe - res_pargs = dict(taus=taus_, q_ring_center=q_ring_center, path=data_dir_, uid=uid) + res_pargs = dict( + taus=taus_, q_ring_center=q_ring_center, path=data_dir_, uid=uid + ) save_saxs_g2(g2, res_pargs) # plot_saxs_g2( g2, taus, vlim=[0.95, 1.05], res_pargs=res_pargs) if fit: @@ -2669,15 +2898,31 @@ def multi_uids_saxs_xpcs_analysis( res_pargs, function="stretched", vlim=[0.95, 1.05], - fit_variables={"baseline": True, "beta": True, "alpha": False, "relaxation_rate": True}, - guess_values={"baseline": 1.0, "beta": 0.05, "alpha": 1.0, "relaxation_rate": 0.01}, + fit_variables={ + "baseline": True, + "beta": True, + "alpha": False, + "relaxation_rate": True, + }, + guess_values={ + "baseline": 1.0, + "beta": 0.05, + "alpha": 1.0, + "relaxation_rate": 0.01, + }, ) fit_q_rate( - q_ring_center[:], fit_result["rate"][:], power_variable=False, uid=uid, path=data_dir_ + q_ring_center[:], + fit_result["rate"][:], + power_variable=False, + uid=uid, + path=data_dir_, ) psave_obj(fit_result, data_dir_ + "uid=%s-g2-fit-para" % uid) - psave_obj(md, data_dir_ + "uid=%s-md" % uid) # save the setup parameters + psave_obj( + md, data_dir_ + "uid=%s-md" % uid + ) # save the setup parameters g2s[run_seq + 1][i] = g2 print("*" * 40) @@ -2689,8 +2934,8 @@ def multi_uids_saxs_xpcs_analysis( def plot_mul_g2(g2s, md): """ Plot multi g2 functions generated by multi_uids_saxs_xpcs_analysis - Will create a 
large plot with q_number pannels - Each pannel (for each q) will show a number (run number of g2 functions + Will create a large plot with q_number panels + Each panel (for each q) will show a number (run number of g2 functions """ q_ring_center = md["q_ring_center"] @@ -2738,7 +2983,12 @@ def plot_mul_g2(g2s, md): # markersize=6, label = '%s'%sid) ax.semilogx( - taus[1:len_], y[1:len_], marker=markers[i], color=colors[i], markersize=6, label="%s" % sid + taus[1:len_], + y[1:len_], + marker=markers[i], + color=colors[i], + markersize=6, + label="%s" % sid, ) if sn == 0: @@ -2753,13 +3003,15 @@ def get_QrQw_From_RoiMask(roi_mask, setup_pargs): Input: roi_mask: int-type array, 2D roi mask, with q-index starting from 1 setup_pargs: dict, at least with keys as - dpix (det pixel size),lamdba_( wavelength), center( beam center) + dpix (det pixel size),lambda_( wavelength), center( beam center) Output: qr_cen: the q center of each ring qr_wid: the q width of each ring """ - qp_roi, iq_roi, q_roi = get_circular_average(roi_mask, np.array(roi_mask, dtype=bool), pargs=setup_pargs) + qp_roi, iq_roi, q_roi = get_circular_average( + roi_mask, np.array(roi_mask, dtype=bool), pargs=setup_pargs + ) Nmax = roi_mask.max() qr_cen = np.zeros(Nmax) qr_wid = np.zeros(Nmax) diff --git a/pyCHX/XPCS_XSVS_SAXS_Multi_2017_V4.py b/pyCHX/XPCS_XSVS_SAXS_Multi_2017_V4.py index 062db0d..ecdafc0 100644 --- a/pyCHX/XPCS_XSVS_SAXS_Multi_2017_V4.py +++ b/pyCHX/XPCS_XSVS_SAXS_Multi_2017_V4.py @@ -2,7 +2,6 @@ from pyCHX.chx_packages import * -from pyCHX.chx_xpcs_xsvs_jupyter import run_xpcs_xsvs_single def XPCS_XSVS_SAXS_Multi( @@ -36,7 +35,13 @@ def XPCS_XSVS_SAXS_Multi( good_start = run_pargs["good_start"] use_imgsum_norm = run_pargs["use_imgsum_norm"] - mask = load_mask(mask_path, mask_name, plot_=False, image_name="%s_mask" % mask_name, reverse=True) + mask = load_mask( + mask_path, + mask_name, + plot_=False, + image_name="%s_mask" % mask_name, + reverse=True, + ) # mask *= pixel_mask mask[:, 
2069] = 0 # False #Concluded from the previous results # np.save( data_dir + 'mask', mask) @@ -44,7 +49,9 @@ def XPCS_XSVS_SAXS_Multi( mask_load = mask.copy() username = getpass.getuser() - data_dir0 = os.path.join("/XF11ID/analysis/", run_pargs["CYCLE"], username, "Results/") + data_dir0 = os.path.join( + "/XF11ID/analysis/", run_pargs["CYCLE"], username, "Results/" + ) os.makedirs(data_dir0, exist_ok=True) print("Results from this analysis will be stashed in the directory %s" % data_dir0) data_dir = os.path.join(data_dir0, uid_average + "/") @@ -86,7 +93,9 @@ def XPCS_XSVS_SAXS_Multi( wat = get_averaged_data_from_multi_res(multi_res, keystr="wat") if run_t_ROI_Inten: times_roi = get_averaged_data_from_multi_res(multi_res, keystr="times_roi") - mean_int_sets = get_averaged_data_from_multi_res(multi_res, keystr="mean_int_sets") + mean_int_sets = get_averaged_data_from_multi_res( + multi_res, keystr="mean_int_sets" + ) if run_one_time: g2 = get_averaged_data_from_multi_res(multi_res, keystr="g2") @@ -105,7 +114,12 @@ def XPCS_XSVS_SAXS_Multi( function=fit_g2_func, vlim=[0.95, 1.05], fit_range=None, - fit_variables={"baseline": True, "beta": True, "alpha": False, "relaxation_rate": True}, + fit_variables={ + "baseline": True, + "beta": True, + "alpha": False, + "relaxation_rate": True, + }, guess_values={ "baseline": 1.0, "beta": 0.05, @@ -113,10 +127,14 @@ def XPCS_XSVS_SAXS_Multi( "relaxation_rate": 0.01, }, ) - g2_fit_paras = save_g2_fit_para_tocsv(g2_fit_result, filename=uid + "_g2_fit_paras.csv", path=data_dir) + g2_fit_paras = save_g2_fit_para_tocsv( + g2_fit_result, filename=uid + "_g2_fit_paras.csv", path=data_dir + ) if run_two_time: - g12b = get_averaged_data_from_multi_res(multi_res, keystr="g12b", different_length=True) + g12b = get_averaged_data_from_multi_res( + multi_res, keystr="g12b", different_length=True + ) g2b = get_averaged_data_from_multi_res(multi_res, keystr="g2b") tausb = get_averaged_data_from_multi_res(multi_res, keystr="tausb") @@ -135,7 
+153,12 @@ def XPCS_XSVS_SAXS_Multi( function=fit_g2_func, vlim=[0.95, 1.05], fit_range=None, - fit_variables={"baseline": True, "beta": True, "alpha": False, "relaxation_rate": True}, + fit_variables={ + "baseline": True, + "beta": True, + "alpha": False, + "relaxation_rate": True, + }, guess_values={ "baseline": 1.0, "beta": 0.05, @@ -144,7 +167,9 @@ def XPCS_XSVS_SAXS_Multi( }, ) - g2b_fit_paras = save_g2_fit_para_tocsv(g2_fit_resultb, filename=uid + "_g2b_fit_paras.csv", path=data_dir) + g2b_fit_paras = save_g2_fit_para_tocsv( + g2_fit_resultb, filename=uid + "_g2b_fit_paras.csv", path=data_dir + ) if run_four_time: g4 = get_averaged_data_from_multi_res(multi_res, keystr="g4") @@ -163,7 +188,9 @@ def XPCS_XSVS_SAXS_Multi( contrast_factorL = get_averaged_data_from_multi_res( multi_res, keystr="contrast_factorL", different_length=False ) - times_xsvs = get_averaged_data_from_multi_res(multi_res, keystr="times_xsvs", different_length=False) + times_xsvs = get_averaged_data_from_multi_res( + multi_res, keystr="times_xsvs", different_length=False + ) cont_pds = save_arrays( contrast_factorL, label=times_xsvs, @@ -172,9 +199,15 @@ def XPCS_XSVS_SAXS_Multi( return_res=True, ) if False: - spec_kmean = get_averaged_data_from_multi_res(multi_res, keystr="spec_kmean") - spec_pds = get_averaged_data_from_multi_res(multi_res, keystr="spec_pds", different_length=False) - times_xsvs = get_averaged_data_from_multi_res(multi_res, keystr="times_xsvs", different_length=False) + spec_kmean = get_averaged_data_from_multi_res( + multi_res, keystr="spec_kmean" + ) + spec_pds = get_averaged_data_from_multi_res( + multi_res, keystr="spec_pds", different_length=False + ) + times_xsvs = get_averaged_data_from_multi_res( + multi_res, keystr="times_xsvs", different_length=False + ) spec_his, spec_std = get_his_std_from_pds(spec_pds, his_shapes=None) ML_val, KL_val, K_ = get_xsvs_fit( spec_his, @@ -290,14 +323,32 @@ def XPCS_XSVS_SAXS_Multi( uid=uidstr, path=data_dir, ) - 
show_qzr_roi(avg_img, roi_mask, inc_x0, ticks, alpha=0.5, save=True, path=data_dir, uid=uidstr) + show_qzr_roi( + avg_img, + roi_mask, + inc_x0, + ticks, + alpha=0.5, + save=True, + path=data_dir, + uid=uidstr, + ) if run_waterfall: plot_waterfallc( - wat, qth_interest, aspect=None, vmax=np.max(wat), uid=uid, save=True, path=data_dir, beg=good_start + wat, + qth_interest, + aspect=None, + vmax=np.max(wat), + uid=uid, + save=True, + path=data_dir, + beg=good_start, ) if run_t_ROI_Inten: - plot_each_ring_mean_intensityc(times_roi, mean_int_sets, uid=uid, save=True, path=data_dir) + plot_each_ring_mean_intensityc( + times_roi, mean_int_sets, uid=uid, save=True, path=data_dir + ) if run_one_time: plot_g2_general( @@ -418,7 +469,16 @@ def XPCS_XSVS_SAXS_Multi( Exdt = {} if scat_geometry == "gi_saxs": for k, v in zip( - ["md", "roi_mask", "qval_dict", "avg_img", "mask", "pixel_mask", "imgsum", "qr_1d_pds"], + [ + "md", + "roi_mask", + "qval_dict", + "avg_img", + "mask", + "pixel_mask", + "imgsum", + "qr_1d_pds", + ], [md, roi_mask, qval_dict, avg_img, mask, pixel_mask, imgsum, qr_1d_pds], ): Exdt[k] = v @@ -463,7 +523,9 @@ def XPCS_XSVS_SAXS_Multi( for k, v in zip(["taus", "g2", "g2_fit_paras"], [taus, g2, g2_fit_paras]): Exdt[k] = v if run_two_time: - for k, v in zip(["tausb", "g2b", "g2b_fit_paras", "g12b"], [tausb, g2b, g2b_fit_paras, g12b]): + for k, v in zip( + ["tausb", "g2b", "g2b_fit_paras", "g12b"], [tausb, g2b, g2b_fit_paras, g12b] + ): Exdt[k] = v if run_four_time: for k, v in zip(["taus4", "g4"], [taus4, g4]): @@ -489,7 +551,10 @@ def XPCS_XSVS_SAXS_Multi( pdf_out_dir = data_dir pdf_filename = "XPCS_Analysis_Report_for_%s%s.pdf" % (uid_average, pdf_version) if run_xsvs: - pdf_filename = "XPCS_XSVS_Analysis_Report_for_%s%s.pdf" % (uid_average, pdf_version) + pdf_filename = "XPCS_XSVS_Analysis_Report_for_%s%s.pdf" % ( + uid_average, + pdf_version, + ) # pdf_filename = "XPCS_XSVS_Analysis_Report_for_uid=%s%s.pdf"%(uid_average,'_2') make_pdf_report( @@ -512,7 
+577,11 @@ def XPCS_XSVS_SAXS_Multi( pname = pdf_out_dir + pdf_filename atch = [Attachment(open(pname, "rb"))] try: - update_olog_uid(uid=fuids[-1], text="Add XPCS Averaged Analysis PDF Report", attachments=atch) + update_olog_uid( + uid=fuids[-1], + text="Add XPCS Averaged Analysis PDF Report", + attachments=atch, + ) except: print( "I can't attach this PDF: %s due to a duplicated filename. Please give a different PDF file." @@ -525,10 +594,17 @@ def XPCS_XSVS_SAXS_Multi( if False: - start_time, stop_time = "2016-12-1 16:30:00", "2016-12-1 16:31:50" # for 10 nm, 20, for test purpose + start_time, stop_time = ( + "2016-12-1 16:30:00", + "2016-12-1 16:31:50", + ) # for 10 nm, 20, for test purpose suf_ids = find_uids(start_time, stop_time) sp = "test" - uid_averages = [sp + "_vs_test1_120116", sp + "_vs_test2_120116", sp + "_vs_test3_120116"] + uid_averages = [ + sp + "_vs_test1_120116", + sp + "_vs_test2_120116", + sp + "_vs_test3_120116", + ] run_pargs = dict( scat_geometry="saxs", @@ -601,6 +677,8 @@ def XPCS_XSVS_SAXS_Multi( suf_ids[1][i * step : (i + 1) * step], suf_ids[2][i * step : (i + 1) * step], ) - XPCS_XSVS_SAXS_Multi(0, 0, run_pargs=run_pargs, suf_ids=suf_idsi, uid_average=uid_averages[i]) + XPCS_XSVS_SAXS_Multi( + 0, 0, run_pargs=run_pargs, suf_ids=suf_idsi, uid_average=uid_averages[i] + ) run_time(t0) diff --git a/pyCHX/_version.py b/pyCHX/_version.py index 6532713..519f9f1 100644 --- a/pyCHX/_version.py +++ b/pyCHX/_version.py @@ -68,7 +68,10 @@ def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False): dispcmd = str([c] + args) # remember shell=False, so use git.cmd on windows, not just git p = subprocess.Popen( - [c] + args, cwd=cwd, stdout=subprocess.PIPE, stderr=(subprocess.PIPE if hide_stderr else None) + [c] + args, + cwd=cwd, + stdout=subprocess.PIPE, + stderr=(subprocess.PIPE if hide_stderr else None), ) break except EnvironmentError: @@ -104,7 +107,12 @@ def versions_from_parentdir(parentdir_prefix, root, verbose): "prefix 
'%s'" % (root, dirname, parentdir_prefix) ) raise NotThisMethod("rootdir doesn't start with parentdir_prefix") - return {"version": dirname[len(parentdir_prefix) :], "full-revisionid": None, "dirty": False, "error": None} + return { + "version": dirname[len(parentdir_prefix) :], + "full-revisionid": None, + "dirty": False, + "error": None, + } @register_vcs_handler("git", "get_keywords") @@ -164,7 +172,12 @@ def git_versions_from_keywords(keywords, tag_prefix, verbose): r = ref[len(tag_prefix) :] if verbose: print("picking %s" % r) - return {"version": r, "full-revisionid": keywords["full"].strip(), "dirty": False, "error": None} + return { + "version": r, + "full-revisionid": keywords["full"].strip(), + "dirty": False, + "error": None, + } # no suitable tags, so version is "0+unknown", but full hex is still there if verbose: print("no suitable tags, using unknown + full revision id") @@ -193,7 +206,9 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): GITS = ["git.cmd", "git.exe"] # if there is a tag, this yields TAG-NUM-gHEX[-dirty] # if there are no tags, this yields HEX[-dirty] (no NUM) - describe_out = run_command(GITS, ["describe", "--tags", "--dirty", "--always", "--long"], cwd=root) + describe_out = run_command( + GITS, ["describe", "--tags", "--dirty", "--always", "--long"], cwd=root + ) # --long was added in git-1.5.5 if describe_out is None: raise NotThisMethod("'git describe' failed") @@ -224,7 +239,7 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): # TAG-NUM-gHEX mo = re.search(r"^(.+)-(\d+)-g([0-9a-f]+)$", git_describe) if not mo: - # unparseable. Maybe git-describe is misbehaving? + # unparsable. Maybe git-describe is misbehaving? 
pieces["error"] = "unable to parse git-describe output: '%s'" % describe_out return pieces @@ -234,7 +249,10 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): if verbose: fmt = "tag '%s' doesn't start with prefix '%s'" print(fmt % (full_tag, tag_prefix)) - pieces["error"] = "tag '%s' doesn't start with prefix '%s'" % (full_tag, tag_prefix) + pieces["error"] = "tag '%s' doesn't start with prefix '%s'" % ( + full_tag, + tag_prefix, + ) return pieces pieces["closest-tag"] = full_tag[len(tag_prefix) :] @@ -408,7 +426,12 @@ def render(pieces, style): else: raise ValueError("unknown style '%s'" % style) - return {"version": rendered, "full-revisionid": pieces["long"], "dirty": pieces["dirty"], "error": None} + return { + "version": rendered, + "full-revisionid": pieces["long"], + "dirty": pieces["dirty"], + "error": None, + } def get_versions(): @@ -452,4 +475,9 @@ def get_versions(): except NotThisMethod: pass - return {"version": "0+unknown", "full-revisionid": None, "dirty": None, "error": "unable to compute version"} + return { + "version": "0+unknown", + "full-revisionid": None, + "dirty": None, + "error": "unable to compute version", + } diff --git a/pyCHX/chx_Fitters2D.py b/pyCHX/chx_Fitters2D.py index a2f27ab..8be7062 100644 --- a/pyCHX/chx_Fitters2D.py +++ b/pyCHX/chx_Fitters2D.py @@ -11,7 +11,12 @@ def gauss_func(x, xc, amp, sigma, baseline): def gauss2D_func(x, y, xc, amp, sigmax, yc, sigmay, baseline): - return amp * np.exp(-((x - xc) ** 2) / 2.0 / sigmax**2) * np.exp(-((y - yc) ** 2) / 2.0 / sigmay**2) + baseline + return ( + amp + * np.exp(-((x - xc) ** 2) / 2.0 / sigmax**2) + * np.exp(-((y - yc) ** 2) / 2.0 / sigmay**2) + + baseline + ) def extract_param(bestfits, key): @@ -72,13 +77,15 @@ def __call__(self, x, y, vx, vy, **kwargs): # make the parameters from the kwargs for key in self.params.keys(): - if key in kwargs.keys() and key is not "XY": + if key in kwargs.keys() and key != "XY": params[key].value = kwargs[key] else: 
# then guess params[key].value = guesskeys[key] - self.mod = Model(self.fitfunc, independent_vars=["x", "y"], param_names=self.params.keys()) + self.mod = Model( + self.fitfunc, independent_vars=["x", "y"], param_names=self.params.keys() + ) # assumes first var is dependent var, and save last params V = np.array([vx, vy]) self._res = self.mod.fit(V, x=x, y=y, params=params) @@ -139,7 +146,7 @@ def guess(self, **kwargs): if kwargs is not None: for key in kwargs.keys(): - if key in paramsdict and key is not "xy": + if key in paramsdict and key != "xy": paramsdict[key] = kwargs[key] return paramsdict @@ -186,15 +193,19 @@ def __call__(self, XY, img, **kwargs): # make the parameters from the kwargs for key in self.params.keys(): - if key in kwargs.keys() and key is not "XY": + if key in kwargs.keys() and key != "XY": params[key].value = kwargs[key] else: # then guess params[key].value = guesskeys[key] - self.mod = Model(self.fitfunc, independent_vars=["XY"], param_names=self.params.keys()) + self.mod = Model( + self.fitfunc, independent_vars=["XY"], param_names=self.params.keys() + ) # assumes first var is dependent var - res = self.mod.fit(img.ravel(), XY=(XY[0].ravel(), XY[1].ravel()), params=params, **kwargs) + res = self.mod.fit( + img.ravel(), XY=(XY[0].ravel(), XY[1].ravel()), params=params, **kwargs + ) ## old version, only return values # add reduced chisq to parameter list # res.best_values['chisq']=res.redchi @@ -266,7 +277,9 @@ def __call__(self, img, x=None, y=None, **kwargs): self.params["amp"].min = 0 return super(Gauss2DFitter, self).__call__(XY, img, **kwargs) - def fitfunc(self, XY, xc=None, yc=None, amp=1.0, baseline=0.0, sigmax=1.0, sigmay=1.0): + def fitfunc( + self, XY, xc=None, yc=None, amp=1.0, baseline=0.0, sigmax=1.0, sigmay=1.0 + ): """ xy : 2 by N by N matrix containing x and y xy[0] : x @@ -284,7 +297,9 @@ def fitfunc(self, XY, xc=None, yc=None, amp=1.0, baseline=0.0, sigmax=1.0, sigma yc = X.shape[0] // 2 return ( - amp * np.exp(-((X - xc) 
** 2) / 2.0 / sigmax**2) * np.exp(-((Y - yc) ** 2) / 2.0 / sigmay**2) + amp + * np.exp(-((X - xc) ** 2) / 2.0 / sigmax**2) + * np.exp(-((Y - yc) ** 2) / 2.0 / sigmay**2) + baseline ) @@ -331,7 +346,7 @@ def guess(self, img, XY=None, **kwargs): paramsdict["sigmay"] = 1 # print( paramsdict ) for key in kwargs.keys(): - if key in paramsdict and key is not "xy": + if key in paramsdict and key != "xy": paramsdict[key] = kwargs[key] # print( paramsdict ) return paramsdict diff --git a/pyCHX/chx_compress.py b/pyCHX/chx_compress.py index 16e9881..653933c 100644 --- a/pyCHX/chx_compress.py +++ b/pyCHX/chx_compress.py @@ -1,11 +1,8 @@ -import gc import os import pickle as pkl import shutil import struct import sys -from contextlib import closing -from glob import iglob from multiprocessing import Pool import dill @@ -14,7 +11,7 @@ # imports handler from CHX # this is where the decision is made whether or not to use dask # from chxtools.handlers import EigerImages, EigerHandler -from eiger_io.fs_handler import EigerHandler, EigerImages +from eiger_io.fs_handler import EigerImages from tqdm import tqdm from pyCHX.chx_generic_functions import ( @@ -28,7 +25,7 @@ reverse_updown, rot90_clockwise, ) -from pyCHX.chx_libs import RUN_GUI, LogNorm, datetime, db, getpass, np, os, roi, time +from pyCHX.chx_libs import RUN_GUI, LogNorm, db, np, os, roi, time def run_dill_encoded(what): @@ -37,7 +34,9 @@ def run_dill_encoded(what): def apply_async(pool, fun, args, callback=None): - return pool.apply_async(run_dill_encoded, (dill.dumps((fun, args)),), callback=callback) + return pool.apply_async( + run_dill_encoded, (dill.dumps((fun, args)),), callback=callback + ) def map_async(pool, fun, args): @@ -196,7 +195,9 @@ def compress_eigerdata( images_per_file=images_per_file, ) else: - print("Using already created compressed file with filename as :%s." % filename) + print( + "Using already created compressed file with filename as :%s." 
% filename + ) beg = 0 return read_compressed_eigerdata( mask, @@ -243,7 +244,9 @@ def read_compressed_eigerdata( CAL = True else: try: - mask, avg_img, imgsum, bad_frame_list_ = pkl.load(open(filename + ".pkl", "rb")) + mask, avg_img, imgsum, bad_frame_list_ = pkl.load( + open(filename + ".pkl", "rb") + ) except: CAL = True if CAL: @@ -259,7 +262,14 @@ def read_compressed_eigerdata( plot_=False, bad_frame_list=bad_frame_list, ) - avg_img = get_avg_imgc(FD, beg=None, end=None, sampling=1, plot_=False, bad_frame_list=bad_frame_list_) + avg_img = get_avg_imgc( + FD, + beg=None, + end=None, + sampling=1, + plot_=False, + bad_frame_list=bad_frame_list_, + ) FD.FID.close() return mask, avg_img, imgsum, bad_frame_list_ @@ -302,7 +312,9 @@ def para_compress_eigerdata( if not copy_rawdata: images_ = EigerImages(data_path, images_per_file, md) else: - print("Due to a IO problem running on GPFS. The raw data will be copied to /tmp_data/Data.") + print( + "Due to a IO problem running on GPFS. The raw data will be copied to /tmp_data/Data." 
+ ) print("Copying...") copy_data(data_path, new_path) # print(data_path, new_path) @@ -322,11 +334,17 @@ def para_compress_eigerdata( N = int(np.ceil(N / bins)) Nf = int(np.ceil(N / num_sub)) if Nf > cpu_core_number: - print("The process number is larger than %s (XF11ID server core number)" % cpu_core_number) + print( + "The process number is larger than %s (XF11ID server core number)" + % cpu_core_number + ) num_sub_old = num_sub num_sub = int(np.ceil(N / cpu_core_number)) Nf = int(np.ceil(N / num_sub)) - print("The sub compressed file number was changed from %s to %s" % (num_sub_old, num_sub)) + print( + "The sub compressed file number was changed from %s to %s" + % (num_sub_old, num_sub) + ) create_compress_header(md, filename + "-header", nobytes, bins, rot90=rot90) # print( 'done for header here') # print(data_path_, images_per_file) @@ -374,7 +392,7 @@ def para_compress_eigerdata( print("Bad frame list are: %s" % bad_frame_list) else: print("No bad frames are involved.") - print("Combining the seperated compressed files together...") + print("Combining the separated compressed files together...") combine_compressed(filename, Nf, del_old=True) del results del res_ @@ -445,15 +463,22 @@ def para_segment_compress_eigerdata( num_sub *= bins if N % num_sub: Nf = N // num_sub + 1 - print("The average image intensity would be slightly not correct, about 1% error.") - print("Please give a num_sub to make reminder of Num_images/num_sub =0 to get a correct avg_image") + print( + "The average image intensity would be slightly not correct, about 1% error." + ) + print( + "Please give a num_sub to make reminder of Num_images/num_sub =0 to get a correct avg_image" + ) else: Nf = N // num_sub print("It will create %i temporary files for parallel compression." 
% Nf) if Nf > num_max_para_process: N_runs = np.int(np.ceil(Nf / float(num_max_para_process))) - print("The parallel run number: %s is larger than num_max_para_process: %s" % (Nf, num_max_para_process)) + print( + "The parallel run number: %s is larger than num_max_para_process: %s" + % (Nf, num_max_para_process) + ) else: N_runs = 1 result = {} @@ -529,7 +554,9 @@ def segment_compress_eigerdata( else: images = EigerImages(data_path, images_per_file, md)[N1:N2] if reverse: - images = reverse_updown(EigerImages(data_path, images_per_file, md))[N1:N2] + images = reverse_updown(EigerImages(data_path, images_per_file, md))[ + N1:N2 + ] if rot90: images = rot90_clockwise(images) @@ -571,7 +598,11 @@ def segment_compress_eigerdata( v = np.ravel(np.array(img, dtype=dtype))[p] dlen = len(p) imgsum[n] = v.sum() - if (dlen == 0) or (imgsum[n] > bad_pixel_threshold) or (imgsum[n] <= bad_pixel_low_threshold): + if ( + (dlen == 0) + or (imgsum[n] > bad_pixel_threshold) + or (imgsum[n] <= bad_pixel_low_threshold) + ): dlen = 0 fp.write(struct.pack("@I", dlen)) else: @@ -582,12 +613,16 @@ def segment_compress_eigerdata( if bins == 1: fp.write(struct.pack("@{}{}".format(dlen, "ih"[nobytes == 2]), *v)) else: - fp.write(struct.pack("@{}{}".format(dlen, "dd"[nobytes == 2]), *v)) # n +=1 + fp.write( + struct.pack("@{}{}".format(dlen, "dd"[nobytes == 2]), *v) + ) # n +=1 del p, v, img fp.flush() fp.close() avg_img /= good_count - bad_frame_list = (np.array(imgsum) > bad_pixel_threshold) | (np.array(imgsum) <= bad_pixel_low_threshold) + bad_frame_list = (np.array(imgsum) > bad_pixel_threshold) | ( + np.array(imgsum) <= bad_pixel_low_threshold + ) sys.stdout.write("#") sys.stdout.flush() # del images, mask, avg_img, imgsum, bad_frame_list @@ -838,7 +873,8 @@ def init_compress_eigerdata( avg_img /= good_count bad_frame_list = np.where( - (np.array(imgsum) > bad_pixel_threshold) | (np.array(imgsum) <= bad_pixel_low_threshold) + (np.array(imgsum) > bad_pixel_threshold) + | (np.array(imgsum) 
<= bad_pixel_low_threshold) )[0] # bad_frame_list1 = np.where( np.array(imgsum) > bad_pixel_threshold )[0] # bad_frame_list2 = np.where( np.array(imgsum) < bad_pixel_low_threshold )[0] @@ -1041,7 +1077,9 @@ def __init__(self, FD, bins=100): self.FD = FD if (FD.end - FD.beg) % bins: - print("Please give a better bins number and make the length of FD/bins= integer") + print( + "Please give a better bins number and make the length of FD/bins= integer" + ) else: self.bins = bins self.md = FD.md @@ -1050,7 +1088,12 @@ def __init__(self, FD, bins=100): Nimg = FD.end - FD.beg slice_num = Nimg // bins self.end = slice_num - self.time_edge = np.array(create_time_slice(N=Nimg, slice_num=slice_num, slice_width=bins)) + FD.beg + self.time_edge = ( + np.array( + create_time_slice(N=Nimg, slice_num=slice_num, slice_width=bins) + ) + + FD.beg + ) self.get_bin_frame() def get_bin_frame(self): @@ -1060,7 +1103,9 @@ def get_bin_frame(self): # print (n) t1, t2 = self.time_edge[n] # print( t1, t2) - self.frames[:, :, n] = get_avg_imgc(FD, beg=t1, end=t2, sampling=1, plot_=False, show_progress=False) + self.frames[:, :, n] = get_avg_imgc( + FD, beg=t1, end=t2, sampling=1, plot_=False, show_progress=False + ) def rdframe(self, n): return self.frames[:, :, n] @@ -1087,7 +1132,7 @@ def __init__(self, filename, mode="rb"): if mode == "wb": raise ValueError("Write mode 'wb' not supported yet") if mode != "rb" and mode != "wb": - raise ValueError("Error, mode must be 'rb' or 'wb'" "got : {}".format(mode)) + raise ValueError("Error, mode must be 'rb' or 'wb'got : {}".format(mode)) self._filename = filename self._mode = mode # open the file descriptor @@ -1174,7 +1219,9 @@ def _read_raw(self, n): Reads from current cursor in file. 
""" if n > self.Nframes: - raise KeyError("Error, only {} frames, asked for {}".format(self.Nframes, n)) + raise KeyError( + "Error, only {} frames, asked for {}".format(self.Nframes, n) + ) # dlen is 4 bytes cur = self.frame_indexes[n] dlen = np.frombuffer(self._fd[cur : cur + 4], dtype=" bad_pixel_threshold) | (np.array(imgsum) <= bad_pixel_low_threshold))[0] + np.where( + (np.array(imgsum) > bad_pixel_threshold) + | (np.array(imgsum) <= bad_pixel_low_threshold) + )[0] + FD.beg ) diff --git a/pyCHX/chx_compress_analysis.py b/pyCHX/chx_compress_analysis.py index 102ddfa..d971940 100644 --- a/pyCHX/chx_compress_analysis.py +++ b/pyCHX/chx_compress_analysis.py @@ -1,13 +1,8 @@ from __future__ import absolute_import, division, print_function import logging -import os -import struct -from collections import namedtuple import matplotlib.pyplot as plt -from skbeam.core.roi import extract_label_indices -from skbeam.core.utils import multi_tau_lags from tqdm import tqdm from pyCHX.chx_generic_functions import save_arrays @@ -17,18 +12,10 @@ from pyCHX.chx_libs import ( RUN_GUI, Figure, - LogNorm, colors, - colors_, - datetime, - db, - getpass, markers, - markers_, np, - os, roi, - time, ) logger = logging.getLogger(__name__) @@ -36,21 +23,17 @@ from modest_image import imshow from pyCHX.chx_compress import ( - Multifile, - compress_eigerdata, get_avg_imgc, - get_each_frame_intensityc, - init_compress_eigerdata, mean_intensityc, - pass_FD, - read_compressed_eigerdata, ) from pyCHX.chx_generic_functions import find_bad_pixels_FD # from pyCHX.chx_compress import * -def get_time_edge_avg_img(FD, frame_edge, show_progress=True, apply_threshold=False, threshold=15): +def get_time_edge_avg_img( + FD, frame_edge, show_progress=True, apply_threshold=False, threshold=15 +): """YG Dev Nov 14, 2017@CHX Update@2019/6/12 with option of apply a threshold for each frame Get averaged img by giving FD and frame edges @@ -76,14 +59,20 @@ def get_time_edge_avg_img(FD, frame_edge, 
show_progress=True, apply_threshold=Fa for i in tqdm(range(Nt)): t1, t2 = frame_edge[i] if not apply_threshold: - d[i] = get_avg_imgc(FD, beg=t1, end=t2, sampling=1, plot_=False, show_progress=show_progress) + d[i] = get_avg_imgc( + FD, beg=t1, end=t2, sampling=1, plot_=False, show_progress=show_progress + ) else: dti = np.zeros([t2 - t1, avg_imgi.shape[0], avg_imgi.shape[1]]) j = 0 for ti in range(t1, t2): # print( j, ti ) badpi = find_bad_pixels_FD( - np.arange(ti, ti + 1), FD, img_shape=avg_imgi.shape, threshold=threshold, show_progress=False + np.arange(ti, ti + 1), + FD, + img_shape=avg_imgi.shape, + threshold=threshold, + show_progress=False, ) badpi = np.array(badpi, dtype=float) badpi[badpi == 0] = np.nan @@ -102,7 +91,14 @@ def plot_imgs(imgs, image_name=None, *argv, **kwargs): def cal_waterfallc( - FD, labeled_array, qindex=1, bin_waterfall=False, waterfall_roi_size=None, save=False, *argv, **kwargs + FD, + labeled_array, + qindex=1, + bin_waterfall=False, + waterfall_roi_size=None, + save=False, + *argv, + **kwargs, ): """Compute the mean intensity for each ROI in the compressed file (FD) @@ -140,7 +136,12 @@ def cal_waterfallc( if labeled_array_.shape != (FD.md["ncols"], FD.md["nrows"]): raise ValueError( " `image` shape (%d, %d) in FD is not equal to the labeled_array shape (%d, %d)" - % (FD.md["ncols"], FD.md["nrows"], labeled_array_.shape[0], labeled_array_.shape[1]) + % ( + FD.md["ncols"], + FD.md["nrows"], + labeled_array_.shape[0], + labeled_array_.shape[1], + ) ) # pre-allocate an array for performance @@ -157,7 +158,9 @@ def cal_waterfallc( norm = np.bincount(qind)[1:] n = 0 # for i in tqdm(range( FD.beg , FD.end )): - for i in tqdm(range(FD.beg, FD.end, sampling), desc="Get waterfall for q index=%s" % qindex): + for i in tqdm( + range(FD.beg, FD.end, sampling), desc="Get waterfall for q index=%s" % qindex + ): (p, v) = FD.rdrawframe(i) w = np.where(timg[p])[0] pxlist = timg[p[w]] - 1 @@ -169,7 +172,9 @@ def cal_waterfallc( watf_ = watf.copy() 
watf = np.zeros([watf_.shape[0], waterfall_roi_size[0]]) for i in range(waterfall_roi_size[1]): - watf += watf_[:, waterfall_roi_size[0] * i : waterfall_roi_size[0] * (i + 1)] + watf += watf_[ + :, waterfall_roi_size[0] * i : waterfall_roi_size[0] * (i + 1) + ] watf /= waterfall_roi_size[0] if save: @@ -191,7 +196,7 @@ def plot_waterfallc( return_fig=False, cmap="viridis", *argv, - **kwargs + **kwargs, ): """plot waterfall for a giving compressed file @@ -230,7 +235,9 @@ def plot_waterfallc( vmin = wat.min() if aspect is None: aspect = wat.shape[0] / wat.shape[1] - im = imshow(ax, wat.T, cmap=cmap, vmax=vmax, extent=extent, interpolation=interpolation) + im = imshow( + ax, wat.T, cmap=cmap, vmax=vmax, extent=extent, interpolation=interpolation + ) # im = ax.imshow(wat.T, cmap='viridis', vmax=vmax,extent= extent,interpolation = interpolation ) fig.colorbar(im) ax.set_aspect(aspect) @@ -249,7 +256,9 @@ def plot_waterfallc( return fig, ax, im -def get_waterfallc(FD, labeled_array, qindex=1, aspect=1.0, vmax=None, save=False, *argv, **kwargs): +def get_waterfallc( + FD, labeled_array, qindex=1, aspect=1.0, vmax=None, save=False, *argv, **kwargs +): """plot waterfall for a giving compressed file FD: class object, the compressed file handler @@ -288,12 +297,16 @@ def get_waterfallc(FD, labeled_array, qindex=1, aspect=1.0, vmax=None, save=Fals return wat -def cal_each_ring_mean_intensityc(FD, ring_mask, sampling=1, timeperframe=None, multi_cor=False, *argv, **kwargs): +def cal_each_ring_mean_intensityc( + FD, ring_mask, sampling=1, timeperframe=None, multi_cor=False, *argv, **kwargs +): """ get time dependent mean intensity of each ring """ - mean_int_sets, index_list = mean_intensityc(FD, ring_mask, sampling, index=None, multi_cor=multi_cor) + mean_int_sets, index_list = mean_intensityc( + FD, ring_mask, sampling, index=None, multi_cor=multi_cor + ) if timeperframe is None: times = np.arange(FD.end - FD.beg) + FD.beg # get the time for each frame else: @@ -302,7 +315,9 
@@ def cal_each_ring_mean_intensityc(FD, ring_mask, sampling=1, timeperframe=None, return times, mean_int_sets -def plot_each_ring_mean_intensityc(times, mean_int_sets, xlabel="Frame", save=False, *argv, **kwargs): +def plot_each_ring_mean_intensityc( + times, mean_int_sets, xlabel="Frame", save=False, *argv, **kwargs +): """ Plot time dependent mean intensity of each ring """ @@ -315,7 +330,14 @@ def plot_each_ring_mean_intensityc(times, mean_int_sets, xlabel="Frame", save=Fa ax.set_title("%s--Mean intensity of each ROI" % uid) for i in range(num_rings): # print( markers[i], colors[i] ) - ax.plot(times, mean_int_sets[:, i], label="ROI " + str(i + 1), marker=markers[i], color=colors[i], ls="-") + ax.plot( + times, + mean_int_sets[:, i], + label="ROI " + str(i + 1), + marker=markers[i], + color=colors[i], + ls="-", + ) ax.set_xlabel(xlabel) ax.set_ylabel("Mean Intensity") ax.legend(loc="best", fontsize="x-small", fancybox=True, framealpha=0.5) @@ -334,7 +356,14 @@ def plot_each_ring_mean_intensityc(times, mean_int_sets, xlabel="Frame", save=Fa def get_each_ring_mean_intensityc( - FD, ring_mask, sampling=1, timeperframe=None, plot_=False, save=False, *argv, **kwargs + FD, + ring_mask, + sampling=1, + timeperframe=None, + plot_=False, + save=False, + *argv, + **kwargs, ): """ get time dependent mean intensity of each ring @@ -355,7 +384,13 @@ def get_each_ring_mean_intensityc( ax.set_title("%s--Mean intensity of each ROI" % uid) for i in range(num_rings): - ax.plot(times, mean_int_sets[:, i], label="ROI " + str(i + 1), marker="o", ls="-") + ax.plot( + times, + mean_int_sets[:, i], + label="ROI " + str(i + 1), + marker="o", + ls="-", + ) if timeperframe is not None: ax.set_xlabel("Time, sec") else: diff --git a/pyCHX/chx_correlation.py b/pyCHX/chx_correlation.py index d636ae7..37d1dc2 100644 --- a/pyCHX/chx_correlation.py +++ b/pyCHX/chx_correlation.py @@ -39,6 +39,7 @@ """ This module is for functions specific to time correlation """ + from __future__ import 
absolute_import, division, print_function from collections import namedtuple @@ -141,7 +142,8 @@ def _one_time_process( norm[level + 1][ind] += 1 else: for w, arr in zip( - [past_img * future_img, past_img, future_img], [G, past_intensity_norm, future_intensity_norm] + [past_img * future_img, past_img, future_img], + [G, past_intensity_norm, future_intensity_norm], ): binned = np.bincount(label_array, weights=w)[1:] arr[t_index] += (binned / num_pixels - arr[t_index]) / normalize @@ -271,7 +273,7 @@ def lazy_one_time(image_iterable, num_levels, num_bufs, labels, internal_state=N ------ namedtuple A `results` object is yielded after every image has been processed. - This `reults` object contains, in this order: + This `results` object contains, in this order: - `g2`: the normalized correlation shape is (len(lag_steps), num_rois) - `lag_steps`: the times at which the correlation was computed @@ -393,7 +395,7 @@ def multi_tau_auto_corr(num_levels, num_bufs, labels, images): author: Mark Sutton For parameter description, please reference the docstring for lazy_one_time. Note that there is an API difference between this function - and `lazy_one_time`. The `images` arugment is at the end of this function + and `lazy_one_time`. The `images` argument is at the end of this function signature here for backwards compatibility, but is the first argument in the `lazy_one_time()` function. The semantics of the variables remain unchanged. 
@@ -462,7 +464,9 @@ def two_time_corr(labels, images, num_frames, num_bufs, num_levels=1): return two_time_state_to_results(result) -def lazy_two_time(labels, images, num_frames, num_bufs, num_levels=1, two_time_internal_state=None): +def lazy_two_time( + labels, images, num_frames, num_bufs, num_levels=1, two_time_internal_state=None +): """Generator implementation of two-time correlation If you do not want multi-tau correlation, set num_levels to 1 and num_bufs to the number of images you wish to correlate @@ -494,7 +498,7 @@ def lazy_two_time(labels, images, num_frames, num_bufs, num_levels=1, two_time_i ------ namedtuple A ``results`` object is yielded after every image has been processed. - This `reults` object contains, in this order: + This `results` object contains, in this order: - ``g2``: the normalized correlation shape is (num_rois, len(lag_steps), len(lag_steps)) - ``lag_steps``: the times at which the correlation was computed @@ -522,7 +526,9 @@ def lazy_two_time(labels, images, num_frames, num_bufs, num_levels=1, two_time_i 010401(1-4), 2007. 
""" if two_time_internal_state is None: - two_time_internal_state = _init_state_two_time(num_levels, num_bufs, labels, num_frames) + two_time_internal_state = _init_state_two_time( + num_levels, num_bufs, labels, num_frames + ) # create a shorthand reference to the results and state named tuple s = two_time_internal_state @@ -578,7 +584,10 @@ def lazy_two_time(labels, images, num_frames, num_bufs, num_levels=1, two_time_i t1_idx = (s.count_level[level] - 1) * 2 - current_img_time = ((s.time_ind[level - 1])[t1_idx] + (s.time_ind[level - 1])[t1_idx + 1]) / 2.0 + current_img_time = ( + (s.time_ind[level - 1])[t1_idx] + + (s.time_ind[level - 1])[t1_idx + 1] + ) / 2.0 # time frame for each level s.time_ind[level].append(current_img_time) @@ -628,7 +637,16 @@ def two_time_state_to_results(state): def _two_time_process( - buf, g2, label_array, num_bufs, num_pixels, img_per_level, lag_steps, current_img_time, level, buf_no + buf, + g2, + label_array, + num_bufs, + num_pixels, + img_per_level, + lag_steps, + current_img_time, + level, + buf_no, ): """ Parameters @@ -689,7 +707,9 @@ def _two_time_process( if not isinstance(current_img_time, int): nshift = 2 ** (level - 1) for i in range(-nshift + 1, nshift + 1): - g2[:, int(tind1 + i), int(tind2 + i)] = (tmp_binned / (pi_binned * fi_binned)) * num_pixels + g2[:, int(tind1 + i), int(tind2 + i)] = ( + tmp_binned / (pi_binned * fi_binned) + ) * num_pixels else: g2[:, tind1, tind2] = tmp_binned / (pi_binned * fi_binned) * num_pixels @@ -795,7 +815,9 @@ def _validate_and_transform_inputs(num_bufs, num_levels, labels): length of each levels """ if num_bufs % 2 != 0: - raise ValueError("There must be an even number of `num_bufs`. You " "provided %s" % num_bufs) + raise ValueError( + "There must be an even number of `num_bufs`. 
You provided %s" % num_bufs + ) label_array, pixel_list = extract_label_indices(labels) # map the indices onto a sequential list of integers starting at 1 @@ -993,7 +1015,10 @@ def __init__(self, shape, mask=None, normalization=None, wrap=False): self.positions.append(np.arange(maskcorr.shape[0]) - center[0]) elif mask.ndim == 2: self.positions.append( - [np.arange(maskcorr.shape[0]) - center[0], np.arange(maskcorr.shape[1]) - center[1]] + [ + np.arange(maskcorr.shape[0]) - center[0], + np.arange(maskcorr.shape[1]) - center[1], + ] ) if len(self.ids) == 1: @@ -1038,22 +1063,32 @@ def __call__(self, img1, img2=None, normalization=None): self.tmpimgs[i].ravel()[self.subpxlsts[i]] = img1.ravel()[self.pxlsts[i]] if not self_correlation: self.tmpimgs2[i] *= 0 - self.tmpimgs2[i].ravel()[self.subpxlsts[i]] = img2.ravel()[self.pxlsts[i]] + self.tmpimgs2[i].ravel()[self.subpxlsts[i]] = img2.ravel()[ + self.pxlsts[i] + ] # multiply by maskcorrs > 0 to ignore invalid regions if self_correlation: ccorr = _cross_corr(self.tmpimgs[i]) * (self.maskcorrs[i] > 0) else: - ccorr = _cross_corr(self.tmpimgs[i], self.tmpimgs2[i]) * (self.maskcorrs[i] > 0) + ccorr = _cross_corr(self.tmpimgs[i], self.tmpimgs2[i]) * ( + self.maskcorrs[i] > 0 + ) # now handle the normalizations if "symavg" in normalization: # do symmetric averaging - Icorr = _cross_corr(self.tmpimgs[i] * self.submasks[i], self.submasks[i]) + Icorr = _cross_corr( + self.tmpimgs[i] * self.submasks[i], self.submasks[i] + ) if self_correlation: - Icorr2 = _cross_corr(self.submasks[i], self.tmpimgs[i] * self.submasks[i]) + Icorr2 = _cross_corr( + self.submasks[i], self.tmpimgs[i] * self.submasks[i] + ) else: - Icorr2 = _cross_corr(self.submasks[i], self.tmpimgs2[i] * self.submasks[i]) + Icorr2 = _cross_corr( + self.submasks[i], self.tmpimgs2[i] * self.submasks[i] + ) # there is an extra condition that Icorr*Icorr2 != 0 w = np.where(np.abs(Icorr * Icorr2) > 0) ccorr[w] *= self.maskcorrs[i][w] / Icorr[w] / Icorr2[w] @@ -1061,7 
+1096,10 @@ def __call__(self, img1, img2=None, normalization=None): if "regular" in normalization: # only run on overlapping regions for correlation w = self.pxlst_maskcorrs[i] - ccorr[w] /= self.maskcorrs[i][w] * np.average(self.tmpimgs[i].ravel()[self.subpxlsts[i]]) ** 2 + ccorr[w] /= ( + self.maskcorrs[i][w] + * np.average(self.tmpimgs[i].ravel()[self.subpxlsts[i]]) ** 2 + ) ccorrs.append(ccorr) diff --git a/pyCHX/chx_correlationc.py b/pyCHX/chx_correlationc.py index 02bc754..bb1cc6e 100644 --- a/pyCHX/chx_correlationc.py +++ b/pyCHX/chx_correlationc.py @@ -95,7 +95,8 @@ def _one_time_process( norm[level + 1][ind] += 1 else: for w, arr in zip( - [past_img * future_img, past_img, future_img], [G, past_intensity_norm, future_intensity_norm] + [past_img * future_img, past_img, future_img], + [G, past_intensity_norm, future_intensity_norm], ): binned = np.bincount(label_array, weights=w)[1:] # nonz = np.where(w)[0] @@ -184,7 +185,6 @@ def _one_time_process_error( if np.isnan(past_img).any() or np.isnan(future_img).any(): norm[level + 1][ind] += 1 else: - # for w, arr in zip([past_img*future_img, past_img, future_img], # [G, past_intensity_norm, future_intensity_norm, # ]): @@ -309,7 +309,9 @@ def _validate_and_transform_inputs(num_bufs, num_levels, labels): length of each levels """ if num_bufs % 2 != 0: - raise ValueError("There must be an even number of `num_bufs`. You " "provided %s" % num_bufs) + raise ValueError( + "There must be an even number of `num_bufs`. 
You provided %s" % num_bufs + ) label_array, pixel_list = extract_label_indices(labels) # map the indices onto a sequential list of integers starting at 1 @@ -397,7 +399,9 @@ def _init_state_one_time(num_levels, num_bufs, labels, cal_error=False): # matrix for normalizing G into g2 future_intensity = np.zeros_like(G) if cal_error: - G_all = np.zeros((int((num_levels + 1) * num_bufs / 2), len(pixel_list)), dtype=np.float64) + G_all = np.zeros( + (int((num_levels + 1) * num_bufs / 2), len(pixel_list)), dtype=np.float64 + ) # matrix for normalizing G into g2 past_intensity_all = np.zeros_like(G_all) @@ -501,7 +505,7 @@ def lazy_one_time( ------- A `results` object is yielded after every image has been processed. - This `reults` object contains, in this order: + This `results` object contains, in this order: - `g2`: the normalized correlation shape is (len(lag_steps), num_rois) - `lag_steps`: the times at which the correlation was computed @@ -908,14 +912,21 @@ def auto_corr_scat_factor(lags, beta, relaxation_rate, baseline=1): def multi_tau_auto_corr( - num_levels, num_bufs, labels, images, bad_frame_list=None, imgsum=None, norm=None, cal_error=False + num_levels, + num_bufs, + labels, + images, + bad_frame_list=None, + imgsum=None, + norm=None, + cal_error=False, ): """Wraps generator implementation of multi-tau Original code(in Yorick) for multi tau auto correlation author: Mark Sutton For parameter description, please reference the docstring for lazy_one_time. Note that there is an API difference between this function - and `lazy_one_time`. The `images` arugment is at the end of this function + and `lazy_one_time`. The `images` argument is at the end of this function signature here for backwards compatibility, but is the first argument in the `lazy_one_time()` function. The semantics of the variables remain unchanged. 
@@ -938,7 +949,9 @@ def multi_tau_auto_corr( return result.g2, result.lag_steps -def multi_tau_two_time_auto_corr(num_lev, num_buf, ring_mask, FD, bad_frame_list=None, imgsum=None, norm=None): +def multi_tau_two_time_auto_corr( + num_lev, num_buf, ring_mask, FD, bad_frame_list=None, imgsum=None, norm=None +): """Wraps generator implementation of multi-tau two time correlation This function computes two-time correlation Original code : author: Yugang Zhang @@ -964,9 +977,15 @@ def multi_tau_two_time_auto_corr(num_lev, num_buf, ring_mask, FD, bad_frame_list def lazy_two_time( - FD, num_levels, num_bufs, labels, two_time_internal_state=None, bad_frame_list=None, imgsum=None, norm=None + FD, + num_levels, + num_bufs, + labels, + two_time_internal_state=None, + bad_frame_list=None, + imgsum=None, + norm=None, ): - # def lazy_two_time(labels, images, num_frames, num_bufs, num_levels=1, # two_time_internal_state=None): """Generator implementation of two-time correlation @@ -997,7 +1016,7 @@ def lazy_two_time( ------ namedtuple A ``results`` object is yielded after every image has been processed. 
- This `reults` object contains, in this order: + This `results` object contains, in this order: - ``g2``: the normalized correlation shape is (num_rois, len(lag_steps), len(lag_steps)) - ``lag_steps``: the times at which the correlation was computed @@ -1026,7 +1045,9 @@ def lazy_two_time( num_frames = FD.end - FD.beg if two_time_internal_state is None: - two_time_internal_state = _init_state_two_time(num_levels, num_bufs, labels, num_frames) + two_time_internal_state = _init_state_two_time( + num_levels, num_bufs, labels, num_frames + ) # create a shorthand reference to the results and state named tuple s = two_time_internal_state qind, pixelist = roi.extract_label_indices(labels) @@ -1097,7 +1118,10 @@ def lazy_two_time( t1_idx = (s.count_level[level] - 1) * 2 - current_img_time = ((s.time_ind[level - 1])[t1_idx] + (s.time_ind[level - 1])[t1_idx + 1]) / 2.0 + current_img_time = ( + (s.time_ind[level - 1])[t1_idx] + + (s.time_ind[level - 1])[t1_idx + 1] + ) / 2.0 # time frame for each level s.time_ind[level].append(current_img_time) # make the track_level zero once that level is processed @@ -1145,7 +1169,16 @@ def two_time_state_to_results(state): def _two_time_process( - buf, g2, label_array, num_bufs, num_pixels, img_per_level, lag_steps, current_img_time, level, buf_no + buf, + g2, + label_array, + num_bufs, + num_pixels, + img_per_level, + lag_steps, + current_img_time, + level, + buf_no, ): """ Parameters @@ -1208,9 +1241,13 @@ def _two_time_process( if not isinstance(current_img_time, int): nshift = 2 ** (level - 1) for i in range(-nshift + 1, nshift + 1): - g2[:, int(tind1 + i), int(tind2 + i)] = (tmp_binned / (pi_binned * fi_binned)) * num_pixels + g2[:, int(tind1 + i), int(tind2 + i)] = ( + tmp_binned / (pi_binned * fi_binned) + ) * num_pixels else: - g2[:, int(tind1), int(tind2)] = tmp_binned / (pi_binned * fi_binned) * num_pixels + g2[:, int(tind1), int(tind2)] = ( + tmp_binned / (pi_binned * fi_binned) * num_pixels + ) # print( num_pixels ) @@ 
-1300,7 +1337,16 @@ def one_time_from_two_time(two_time_corr): return one_time_corr -def cal_c12c(FD, ring_mask, bad_frame_list=None, good_start=0, num_buf=8, num_lev=None, imgsum=None, norm=None): +def cal_c12c( + FD, + ring_mask, + bad_frame_list=None, + good_start=0, + num_buf=8, + num_lev=None, + imgsum=None, + norm=None, +): """calculation two_time correlation by using a multi-tau algorithm""" # noframes = FD.end - good_start # number of frames, not "no frames" @@ -1311,11 +1357,16 @@ def cal_c12c(FD, ring_mask, bad_frame_list=None, good_start=0, num_buf=8, num_le if num_lev is None: num_lev = int(np.log(noframes / (num_buf - 1)) / np.log(2) + 1) + 1 - print("In this g2 calculation, the buf and lev number are: %s--%s--" % (num_buf, num_lev)) + print( + "In this g2 calculation, the buf and lev number are: %s--%s--" + % (num_buf, num_lev) + ) if bad_frame_list is not None: if len(bad_frame_list) != 0: - print("Bad frame involved and will be precessed!") - noframes -= len(np.where(np.in1d(bad_frame_list, range(good_start, FD.end)))[0]) + print("Bad frame involved and will be processed!") + noframes -= len( + np.where(np.in1d(bad_frame_list, range(good_start, FD.end)))[0] + ) print("%s frames will be processed..." % (noframes)) c12, lag_steps, state = multi_tau_two_time_auto_corr( @@ -1352,16 +1403,28 @@ def cal_g2c( if num_lev is None: num_lev = int(np.log(noframes / (num_buf - 1)) / np.log(2) + 1) + 1 - print("In this g2 calculation, the buf and lev number are: %s--%s--" % (num_buf, num_lev)) + print( + "In this g2 calculation, the buf and lev number are: %s--%s--" + % (num_buf, num_lev) + ) if bad_frame_list is not None: if len(bad_frame_list) != 0: - print("Bad frame involved and will be precessed!") - noframes -= len(np.where(np.in1d(bad_frame_list, range(good_start, FD.end)))[0]) + print("Bad frame involved and will be processed!") + noframes -= len( + np.where(np.in1d(bad_frame_list, range(good_start, FD.end)))[0] + ) print("%s frames will be processed..." 
% (noframes)) if cal_error: g2, lag_steps, s = multi_tau_auto_corr( - num_lev, num_buf, ring_mask, FD, bad_frame_list, imgsum=imgsum, norm=norm, cal_error=cal_error + num_lev, + num_buf, + ring_mask, + FD, + bad_frame_list, + imgsum=imgsum, + norm=norm, + cal_error=cal_error, ) g2 = np.zeros_like(s.G) @@ -1398,15 +1461,24 @@ def cal_g2c( g2[:g_max, qi - 1] = avgGi[:g_max] / (avgPi[:g_max] * avgFi[:g_max]) g2_err[:g_max, qi - 1] = np.sqrt( (1 / (avgFi[:g_max] * avgPi[:g_max])) ** 2 * devGi[:g_max] ** 2 - + (avgGi[:g_max] / (avgFi[:g_max] ** 2 * avgPi[:g_max])) ** 2 * devFi[:g_max] ** 2 - + (avgGi[:g_max] / (avgFi[:g_max] * avgPi[:g_max] ** 2)) ** 2 * devPi[:g_max] ** 2 + + (avgGi[:g_max] / (avgFi[:g_max] ** 2 * avgPi[:g_max])) ** 2 + * devFi[:g_max] ** 2 + + (avgGi[:g_max] / (avgFi[:g_max] * avgPi[:g_max] ** 2)) ** 2 + * devPi[:g_max] ** 2 ) print("G2 with error bar calculation DONE!") return g2[:g_max, :], lag_steps[:g_max], g2_err[:g_max, :] / np.sqrt(nopr), s else: g2, lag_steps = multi_tau_auto_corr( - num_lev, num_buf, ring_mask, FD, bad_frame_list, imgsum=imgsum, norm=norm, cal_error=cal_error + num_lev, + num_buf, + ring_mask, + FD, + bad_frame_list, + imgsum=imgsum, + norm=norm, + cal_error=cal_error, ) print("G2 calculation DONE!") @@ -1437,13 +1509,23 @@ class Get_Pixel_Arrayc_todo(object): data_pixel = Get_Pixel_Array( imgsr, pixelist).get_data() """ - def __init__(self, FD, pixelist, beg=None, end=None, norm=None, imgsum=None, norm_inten=None, qind=None): + def __init__( + self, + FD, + pixelist, + beg=None, + end=None, + norm=None, + imgsum=None, + norm_inten=None, + qind=None, + ): """ indexable: a images sequences pixelist: 1-D array, interest pixel list norm: each q-ROI of each frame is normalized by the corresponding q-ROI of time averaged intensity imgsum: each q-ROI of each frame is normalized by the total intensity of the corresponding frame, should have the same time sequences as FD, e.g., imgsum[10] corresponding to FD[10] - norm_inten: if 
True, each q-ROI of each frame is normlized by total intensity of the correponding q-ROI of the corresponding frame + norm_inten: if True, each q-ROI of each frame is normalized by total intensity of the corresponding q-ROI of the corresponding frame qind: the index of each ROI in one frame, i.e., q if norm_inten is True: qind has to be given @@ -1501,14 +1583,20 @@ def get_data(self): pxlist = timg[p[w]] - 1 # np.bincount( qind[pxlist], weight= - if self.mean_int_sets is not None: # for each frame will normalize each ROI by it's averaged value + if ( + self.mean_int_sets is not None + ): # for each frame will normalize each ROI by it's averaged value for j in range(noqs): # if i ==100: # if j==0: # print( self.mean_int_sets[i][j] ) # print( qind_[ noprs[j]: noprs[j+1] ] ) - Mean_Int_Qind[qind_[noprs[j] : noprs[j + 1]]] = self.mean_int_sets[i][j] - norm_Mean_Int_Qind = Mean_Int_Qind[pxlist] # self.mean_int_set or Mean_Int_Qind[pxlist] + Mean_Int_Qind[qind_[noprs[j] : noprs[j + 1]]] = self.mean_int_sets[ + i + ][j] + norm_Mean_Int_Qind = Mean_Int_Qind[ + pxlist + ] # self.mean_int_set or Mean_Int_Qind[pxlist] # if i==100: # print( i, Mean_Int_Qind[ self.qind== 11 ]) @@ -1546,13 +1634,23 @@ class Get_Pixel_Arrayc(object): data_pixel = Get_Pixel_Array( imgsr, pixelist).get_data() """ - def __init__(self, FD, pixelist, beg=None, end=None, norm=None, imgsum=None, mean_int_sets=None, qind=None): + def __init__( + self, + FD, + pixelist, + beg=None, + end=None, + norm=None, + imgsum=None, + mean_int_sets=None, + qind=None, + ): """ indexable: a images sequences pixelist: 1-D array, interest pixel list norm: each q-ROI of each frame is normalized by the corresponding q-ROI of time averaged intensity imgsum: each q-ROI of each frame is normalized by the total intensity of the corresponding frame, should have the same time sequences as FD, e.g., imgsum[10] corresponding to FD[10] - mean_int_sets: each q-ROI of each frame is normlized by total intensity of the correponding 
q-ROI of the corresponding frame + mean_int_sets: each q-ROI of each frame is normalized by total intensity of the corresponding q-ROI of the corresponding frame qind: the index of each ROI in one frame, i.e., q if mean_int_sets is not None: qind has to be not None @@ -1605,14 +1703,20 @@ def get_data(self): w = np.where(timg[p])[0] pxlist = timg[p[w]] - 1 - if self.mean_int_sets is not None: # for normalization of each averaged ROI of each frame + if ( + self.mean_int_sets is not None + ): # for normalization of each averaged ROI of each frame for j in range(noqs): # if i ==100: # if j==0: # print( self.mean_int_sets[i][j] ) # print( qind_[ noprs[j]: noprs[j+1] ] ) - Mean_Int_Qind[qind_[noprs[j] : noprs[j + 1]]] = self.mean_int_sets[i][j] - norm_Mean_Int_Qind = Mean_Int_Qind[pxlist] # self.mean_int_set or Mean_Int_Qind[pxlist] + Mean_Int_Qind[qind_[noprs[j] : noprs[j + 1]]] = self.mean_int_sets[ + i + ][j] + norm_Mean_Int_Qind = Mean_Int_Qind[ + pxlist + ] # self.mean_int_set or Mean_Int_Qind[pxlist] # if i==100: # print( i, Mean_Int_Qind[ self.qind== 11 ]) @@ -1688,7 +1792,7 @@ def auto_two_Arrayc(data_pixel, rois, index=None): DO = True except: print( - "The array is too large. The Sever can't handle such big array. Will calulate different Q sequencely" + "The array is too large. The Sever can't handle such big array. 
Will calculate different Q sequencely" ) """TO be done here """ DO = False @@ -1704,7 +1808,9 @@ def auto_two_Arrayc(data_pixel, rois, index=None): sum2 = sum1.T # print( qi, qlist, ) # print( g12b[:,:,qi -1 ] ) - g12b[:, :, i] = np.dot(data_pixel_qi, data_pixel_qi.T) / sum1 / sum2 / nopr[qi - 1] + g12b[:, :, i] = ( + np.dot(data_pixel_qi, data_pixel_qi.T) / sum1 / sum2 / nopr[qi - 1] + ) i += 1 return g12b @@ -1712,12 +1818,12 @@ def auto_two_Arrayc(data_pixel, rois, index=None): def auto_two_Arrayc_ExplicitNorm(data_pixel, rois, norm=None, index=None): """ Dec 16, 2015, Y.G.@CHX - a numpy operation method to get two-time correlation function by giving explict normalization + a numpy operation method to get two-time correlation function by giving explicit normalization Parameters: data: images sequence, shape as [img[0], img[1], imgs_length] rois: 2-D array, the interested roi, has the same shape as image, can be rings for saxs, boxes for gisaxs - norm: if not None, shoud be the shape as data_pixel, will normalize two time by this norm + norm: if not None, should be the shape as data_pixel, will normalize two time by this norm if None, will return two time without normalization Options: @@ -1755,7 +1861,7 @@ def auto_two_Arrayc_ExplicitNorm(data_pixel, rois, norm=None, index=None): DO = True except: print( - "The array is too large. The Sever can't handle such big array. Will calulate different Q sequencely" + "The array is too large. The Sever can't handle such big array. 
Will calculate different Q sequencely" ) """TO be done here """ DO = False @@ -1771,7 +1877,9 @@ def auto_two_Arrayc_ExplicitNorm(data_pixel, rois, norm=None, index=None): else: sum1 = 1 sum2 = 1 - g12b[:, :, i] = np.dot(data_pixel_qi, data_pixel_qi.T) / sum1 / sum2 / nopr[qi - 1] + g12b[:, :, i] = ( + np.dot(data_pixel_qi, data_pixel_qi.T) / sum1 / sum2 / nopr[qi - 1] + ) i += 1 return g12b @@ -1820,7 +1928,7 @@ def two_time_norm(data_pixel, rois, index=None): DO = True except: print( - "The array is too large. The Sever can't handle such big array. Will calulate different Q sequencely" + "The array is too large. The Sever can't handle such big array. Will calculate different Q sequencely" ) """TO be done here """ DO = False @@ -1848,7 +1956,7 @@ def check_normalization(frame_num, q_list, imgsa, data_pixel): frame_num: integer, the number of frame to be checked q_list: list of integer, the list of q to be checked imgsa: the raw data - data_pixel: the normalized data, caculated by fucntion Get_Pixel_Arrayc + data_pixel: the normalized data, calculated by function Get_Pixel_Arrayc Plot the intensities """ fig, ax = plt.subplots(2) @@ -1857,7 +1965,13 @@ def check_normalization(frame_num, q_list, imgsa, data_pixel): norm_data = data_pixel[frame_num][qind == q] raw_data = np.ravel(np.array(imgsa[frame_num]))[pixelist[qind == q]] # print(raw_data.mean()) - plot1D(raw_data, ax=ax[0], legend="q=%s" % (q), m=markers[n], title="fra=%s_raw_data" % (frame_num)) + plot1D( + raw_data, + ax=ax[0], + legend="q=%s" % (q), + m=markers[n], + title="fra=%s_raw_data" % (frame_num), + ) # plot1D( raw_data/mean_int_sets_[frame_num][q-1], ax=ax[1], legend='q=%s'%(q), m=markers[n], # xlabel='pixel',title='fra=%s_norm_data'%(frame_num)) diff --git a/pyCHX/chx_correlationp.py b/pyCHX/chx_correlationp.py index 496ec67..9ff9356 100644 --- a/pyCHX/chx_correlationp.py +++ b/pyCHX/chx_correlationp.py @@ -7,21 +7,17 @@ from __future__ import absolute_import, division, print_function import 
logging -import sys -from collections import namedtuple from multiprocessing import Pool -import dill import numpy as np import skbeam.core.roi as roi from skbeam.core.roi import extract_label_indices -from skbeam.core.utils import multi_tau_lags -from pyCHX.chx_compress import apply_async, go_through_FD, map_async, pass_FD, run_dill_encoded +from pyCHX.chx_compress import apply_async, pass_FD from pyCHX.chx_correlationc import _one_time_process as _one_time_processp from pyCHX.chx_correlationc import _one_time_process_error as _one_time_process_errorp from pyCHX.chx_correlationc import _two_time_process as _two_time_processp -from pyCHX.chx_correlationc import _validate_and_transform_inputs, get_pixelist_interp_iq +from pyCHX.chx_correlationc import _validate_and_transform_inputs from pyCHX.chx_libs import tqdm logger = logging.getLogger(__name__) @@ -93,7 +89,14 @@ def __setstate__(self, state): def lazy_two_timep( - FD, num_levels, num_bufs, labels, internal_state=None, bad_frame_list=None, imgsum=None, norm=None + FD, + num_levels, + num_bufs, + labels, + internal_state=None, + bad_frame_list=None, + imgsum=None, + norm=None, ): """Generator implementation of two-time correlation If you do not want multi-tau correlation, set num_levels to 1 and @@ -123,7 +126,7 @@ def lazy_two_timep( ------ namedtuple A ``results`` object is yielded after every image has been processed. 
- This `reults` object contains, in this order: + This `results` object contains, in this order: - ``g2``: the normalized correlation shape is (num_rois, len(lag_steps), len(lag_steps)) - ``lag_steps``: the times at which the correlation was computed @@ -229,7 +232,10 @@ def lazy_two_timep( s.buf[level - 1, prev - 1] + s.buf[level - 1, s.cur[level - 1] - 1] ) / 2 t1_idx = (s.count_level[level] - 1) * 2 - current_img_time = ((s.time_ind[level - 1])[t1_idx] + (s.time_ind[level - 1])[t1_idx + 1]) / 2.0 + current_img_time = ( + (s.time_ind[level - 1])[t1_idx] + + (s.time_ind[level - 1])[t1_idx + 1] + ) / 2.0 # time frame for each level s.time_ind[level].append(current_img_time) # make the track_level zero once that level is processed @@ -262,7 +268,16 @@ def lazy_two_timep( return s.g2, s.lag_steps -def cal_c12p(FD, ring_mask, bad_frame_list=None, good_start=0, num_buf=8, num_lev=None, imgsum=None, norm=None): +def cal_c12p( + FD, + ring_mask, + bad_frame_list=None, + good_start=0, + num_buf=8, + num_lev=None, + imgsum=None, + norm=None, +): """calculation g2 by using a multi-tau algorithm for a compressed file with parallel calculation """ @@ -272,24 +287,46 @@ def cal_c12p(FD, ring_mask, bad_frame_list=None, good_start=0, num_buf=8, num_le pass_FD(FD, i) if num_lev is None: num_lev = int(np.log(noframes / (num_buf - 1)) / np.log(2) + 1) + 1 - print("In this g2 calculation, the buf and lev number are: %s--%s--" % (num_buf, num_lev)) + print( + "In this g2 calculation, the buf and lev number are: %s--%s--" + % (num_buf, num_lev) + ) if bad_frame_list is not None: if len(bad_frame_list) != 0: - print("Bad frame involved and will be precessed!") - noframes -= len(np.where(np.in1d(bad_frame_list, range(good_start, FD.end)))[0]) + print("Bad frame involved and will be processed!") + noframes -= len( + np.where(np.in1d(bad_frame_list, range(good_start, FD.end)))[0] + ) print("%s frames will be processed..." 
% (noframes)) - ring_masks = [np.array(ring_mask == i, dtype=np.int64) for i in np.unique(ring_mask)[1:]] + ring_masks = [ + np.array(ring_mask == i, dtype=np.int64) for i in np.unique(ring_mask)[1:] + ] qind, pixelist = roi.extract_label_indices(ring_mask) if norm is not None: S = norm.shape if len(S) > 1: norms = [ - norm[:, np.in1d(pixelist, extract_label_indices(np.array(ring_mask == i, dtype=np.int64))[1])] + norm[ + :, + np.in1d( + pixelist, + extract_label_indices(np.array(ring_mask == i, dtype=np.int64))[ + 1 + ], + ), + ] for i in np.unique(ring_mask)[1:] ] else: norms = [ - norm[np.in1d(pixelist, extract_label_indices(np.array(ring_mask == i, dtype=np.int64))[1])] + norm[ + np.in1d( + pixelist, + extract_label_indices(np.array(ring_mask == i, dtype=np.int64))[ + 1 + ], + ) + ] for i in np.unique(ring_mask)[1:] ] inputs = range(len(ring_masks)) @@ -354,7 +391,7 @@ def __init__(self, num_levels, num_bufs, labels, cal_error=False): """YG. DEV Nov, 2016, Initialize class for the generator-based multi-tau for one time correlation - Jan 1, 2018, Add cal_error option to calculate signal to noise to one time correaltion + Jan 1, 2018, Add cal_error option to calculate signal to noise to one time correlation """ ( @@ -406,7 +443,10 @@ def __init__(self, num_levels, num_bufs, labels, cal_error=False): lev_len, ) if cal_error: - self.G_all = np.zeros((int((num_levels + 1) * num_bufs / 2), len(pixel_list)), dtype=np.float64) + self.G_all = np.zeros( + (int((num_levels + 1) * num_bufs / 2), len(pixel_list)), + dtype=np.float64, + ) # matrix for normalizing G into g2 self.past_intensity_all = np.zeros_like(self.G_all) # matrix for normalizing G into g2 @@ -604,7 +644,13 @@ def lazy_one_timep( # return results(g2, s.lag_steps[:g_max], s) if cal_error: # return g2, s.lag_steps[:g_max], s.G[:g_max],s.past_intensity[:g_max], s.future_intensity[:g_max] #, s - return (None, s.lag_steps, s.G_all, s.past_intensity_all, s.future_intensity_all) # , s ) + return ( + None, + 
s.lag_steps, + s.G_all, + s.past_intensity_all, + s.future_intensity_all, + ) # , s ) else: return g2, s.lag_steps[:g_max] # , s @@ -630,13 +676,20 @@ def cal_g2p( pass_FD(FD, i) if num_lev is None: num_lev = int(np.log(noframes / (num_buf - 1)) / np.log(2) + 1) + 1 - print("In this g2 calculation, the buf and lev number are: %s--%s--" % (num_buf, num_lev)) + print( + "In this g2 calculation, the buf and lev number are: %s--%s--" + % (num_buf, num_lev) + ) if bad_frame_list is not None: if len(bad_frame_list) != 0: print("%s Bad frames involved and will be discarded!" % len(bad_frame_list)) - noframes -= len(np.where(np.in1d(bad_frame_list, range(good_start, FD.end)))[0]) + noframes -= len( + np.where(np.in1d(bad_frame_list, range(good_start, FD.end)))[0] + ) print("%s frames will be processed..." % (noframes - 1)) - ring_masks = [np.array(ring_mask == i, dtype=np.int64) for i in np.unique(ring_mask)[1:]] + ring_masks = [ + np.array(ring_mask == i, dtype=np.int64) for i in np.unique(ring_mask)[1:] + ] qind, pixelist = roi.extract_label_indices(ring_mask) noqs = len(np.unique(qind)) nopr = np.bincount(qind, minlength=(noqs + 1))[1:] @@ -644,12 +697,27 @@ def cal_g2p( S = norm.shape if len(S) > 1: norms = [ - norm[:, np.in1d(pixelist, extract_label_indices(np.array(ring_mask == i, dtype=np.int64))[1])] + norm[ + :, + np.in1d( + pixelist, + extract_label_indices(np.array(ring_mask == i, dtype=np.int64))[ + 1 + ], + ), + ] for i in np.unique(ring_mask)[1:] ] else: norms = [ - norm[np.in1d(pixelist, extract_label_indices(np.array(ring_mask == i, dtype=np.int64))[1])] + norm[ + np.in1d( + pixelist, + extract_label_indices(np.array(ring_mask == i, dtype=np.int64))[ + 1 + ], + ) + ] for i in np.unique(ring_mask)[1:] ] inputs = range(len(ring_masks)) @@ -662,7 +730,17 @@ def cal_g2p( results[i] = apply_async( pool, lazy_one_timep, - (FD, num_lev, num_buf, ring_masks[i], internal_state, bad_frame_list, imgsum, norms[i], cal_error), + ( + FD, + num_lev, + num_buf, + 
ring_masks[i], + internal_state, + bad_frame_list, + imgsum, + norms[i], + cal_error, + ), ) else: # print ('for norm is None') @@ -670,7 +748,17 @@ def cal_g2p( results[i] = apply_async( pool, lazy_one_timep, - (FD, num_lev, num_buf, ring_masks[i], internal_state, bad_frame_list, imgsum, None, cal_error), + ( + FD, + num_lev, + num_buf, + ring_masks[i], + internal_state, + bad_frame_list, + imgsum, + None, + cal_error, + ), ) pool.close() print("Starting running the tasks...") @@ -723,8 +811,10 @@ def cal_g2p( g2[:g_max, i] = avgGi[:g_max] / (avgPi[:g_max] * avgFi[:g_max]) g2_err[:g_max, i] = np.sqrt( (1 / (avgFi[:g_max] * avgPi[:g_max])) ** 2 * devGi[:g_max] ** 2 - + (avgGi[:g_max] / (avgFi[:g_max] ** 2 * avgPi[:g_max])) ** 2 * devFi[:g_max] ** 2 - + (avgGi[:g_max] / (avgFi[:g_max] * avgPi[:g_max] ** 2)) ** 2 * devPi[:g_max] ** 2 + + (avgGi[:g_max] / (avgFi[:g_max] ** 2 * avgPi[:g_max])) ** 2 + * devFi[:g_max] ** 2 + + (avgGi[:g_max] / (avgFi[:g_max] * avgPi[:g_max] ** 2)) ** 2 + * devPi[:g_max] ** 2 ) Gmax = max(g_max, Gmax) lag_stepsi = res[i][1] @@ -762,23 +852,35 @@ def cal_GPF( pass_FD(FD, i) if num_lev is None: num_lev = int(np.log(noframes / (num_buf - 1)) / np.log(2) + 1) + 1 - print("In this g2 calculation, the buf and lev number are: %s--%s--" % (num_buf, num_lev)) + print( + "In this g2 calculation, the buf and lev number are: %s--%s--" + % (num_buf, num_lev) + ) if bad_frame_list is not None: if len(bad_frame_list) != 0: print("%s Bad frames involved and will be discarded!" % len(bad_frame_list)) - noframes -= len(np.where(np.in1d(bad_frame_list, range(good_start, FD.end)))[0]) + noframes -= len( + np.where(np.in1d(bad_frame_list, range(good_start, FD.end)))[0] + ) print("%s frames will be processed..." 
% (noframes - 1)) if np.min(ring_mask) == 0: qstart = 1 else: qstart = 0 - ring_masks = [np.array(ring_mask == i, dtype=np.int64) for i in np.unique(ring_mask)[qstart:]] + ring_masks = [ + np.array(ring_mask == i, dtype=np.int64) for i in np.unique(ring_mask)[qstart:] + ] qind, pixelist = roi.extract_label_indices(ring_mask) noqs = len(np.unique(qind)) nopr = np.bincount(qind, minlength=(noqs + 1))[qstart:] if norm is not None: norms = [ - norm[np.in1d(pixelist, extract_label_indices(np.array(ring_mask == i, dtype=np.int64))[1])] + norm[ + np.in1d( + pixelist, + extract_label_indices(np.array(ring_mask == i, dtype=np.int64))[1], + ) + ] for i in np.unique(ring_mask)[qstart:] ] @@ -792,7 +894,17 @@ def cal_GPF( results[i] = apply_async( pool, lazy_one_timep, - (FD, num_lev, num_buf, ring_masks[i], internal_state, bad_frame_list, imgsum, norms[i], cal_error), + ( + FD, + num_lev, + num_buf, + ring_masks[i], + internal_state, + bad_frame_list, + imgsum, + norms[i], + cal_error, + ), ) else: # print ('for norm is None') @@ -800,7 +912,17 @@ def cal_GPF( results[i] = apply_async( pool, lazy_one_timep, - (FD, num_lev, num_buf, ring_masks[i], internal_state, bad_frame_list, imgsum, None, cal_error), + ( + FD, + num_lev, + num_buf, + ring_masks[i], + internal_state, + bad_frame_list, + imgsum, + None, + cal_error, + ), ) pool.close() print("Starting running the tasks...") @@ -871,8 +993,10 @@ def get_g2_from_ROI_GPF(G, P, F, roi_mask): g2[:g_max, i - 1] = avgGi[:g_max] / (avgPi[:g_max] * avgFi[:g_max]) g2_err[:g_max, i - 1] = np.sqrt( (1 / (avgFi[:g_max] * avgPi[:g_max])) ** 2 * devGi[:g_max] ** 2 - + (avgGi[:g_max] / (avgFi[:g_max] ** 2 * avgPi[:g_max])) ** 2 * devFi[:g_max] ** 2 - + (avgGi[:g_max] / (avgFi[:g_max] * avgPi[:g_max] ** 2)) ** 2 * devPi[:g_max] ** 2 + + (avgGi[:g_max] / (avgFi[:g_max] ** 2 * avgPi[:g_max])) ** 2 + * devFi[:g_max] ** 2 + + (avgGi[:g_max] / (avgFi[:g_max] * avgPi[:g_max] ** 2)) ** 2 + * devPi[:g_max] ** 2 ) return g2, g2_err @@ -934,7 +1058,9 
@@ def auto_two_Arrayp(data_pixel, rois, index=None): pool = Pool(processes=len(inputs)) results = {} for i in inputs: - results[i] = pool.apply_async(_get_two_time_for_one_q, [qlist[i], data_pixel_qis[i], nopr, noframes]) + results[i] = pool.apply_async( + _get_two_time_for_one_q, [qlist[i], data_pixel_qis[i], nopr, noframes] + ) pool.close() pool.join() res = np.array([results[k].get() for k in list(sorted(results.keys()))]) diff --git a/pyCHX/chx_correlationp2.py b/pyCHX/chx_correlationp2.py index 8ddbc19..b9c5e0e 100644 --- a/pyCHX/chx_correlationp2.py +++ b/pyCHX/chx_correlationp2.py @@ -9,21 +9,17 @@ from __future__ import absolute_import, division, print_function import logging -import sys -from collections import namedtuple from multiprocessing import Pool -import dill import numpy as np import skbeam.core.roi as roi from skbeam.core.roi import extract_label_indices -from skbeam.core.utils import multi_tau_lags -from pyCHX.chx_compress import apply_async, go_through_FD, map_async, pass_FD, run_dill_encoded +from pyCHX.chx_compress import apply_async, pass_FD from pyCHX.chx_correlationc import _one_time_process as _one_time_processp from pyCHX.chx_correlationc import _one_time_process_error as _one_time_process_errorp from pyCHX.chx_correlationc import _two_time_process as _two_time_processp -from pyCHX.chx_correlationc import _validate_and_transform_inputs, get_pixelist_interp_iq +from pyCHX.chx_correlationc import _validate_and_transform_inputs from pyCHX.chx_libs import tqdm logger = logging.getLogger(__name__) @@ -95,7 +91,14 @@ def __setstate__(self, state): def lazy_two_timep( - FD, num_levels, num_bufs, labels, internal_state=None, bad_frame_list=None, imgsum=None, norm=None + FD, + num_levels, + num_bufs, + labels, + internal_state=None, + bad_frame_list=None, + imgsum=None, + norm=None, ): """Generator implementation of two-time correlation If you do not want multi-tau correlation, set num_levels to 1 and @@ -125,7 +128,7 @@ def lazy_two_timep( 
------ namedtuple A ``results`` object is yielded after every image has been processed. - This `reults` object contains, in this order: + This `results` object contains, in this order: - ``g2``: the normalized correlation shape is (num_rois, len(lag_steps), len(lag_steps)) - ``lag_steps``: the times at which the correlation was computed @@ -226,7 +229,10 @@ def lazy_two_timep( s.buf[level - 1, prev - 1] + s.buf[level - 1, s.cur[level - 1] - 1] ) / 2 t1_idx = (s.count_level[level] - 1) * 2 - current_img_time = ((s.time_ind[level - 1])[t1_idx] + (s.time_ind[level - 1])[t1_idx + 1]) / 2.0 + current_img_time = ( + (s.time_ind[level - 1])[t1_idx] + + (s.time_ind[level - 1])[t1_idx + 1] + ) / 2.0 # time frame for each level s.time_ind[level].append(current_img_time) # make the track_level zero once that level is processed @@ -259,7 +265,16 @@ def lazy_two_timep( return s.g2, s.lag_steps -def cal_c12p(FD, ring_mask, bad_frame_list=None, good_start=0, num_buf=8, num_lev=None, imgsum=None, norm=None): +def cal_c12p( + FD, + ring_mask, + bad_frame_list=None, + good_start=0, + num_buf=8, + num_lev=None, + imgsum=None, + norm=None, +): """calculation g2 by using a multi-tau algorithm for a compressed file with parallel calculation """ @@ -269,17 +284,29 @@ def cal_c12p(FD, ring_mask, bad_frame_list=None, good_start=0, num_buf=8, num_le pass_FD(FD, i) if num_lev is None: num_lev = int(np.log(noframes / (num_buf - 1)) / np.log(2) + 1) + 1 - print("In this g2 calculation, the buf and lev number are: %s--%s--" % (num_buf, num_lev)) + print( + "In this g2 calculation, the buf and lev number are: %s--%s--" + % (num_buf, num_lev) + ) if bad_frame_list is not None: if len(bad_frame_list) != 0: - print("Bad frame involved and will be precessed!") - noframes -= len(np.where(np.in1d(bad_frame_list, range(good_start, FD.end)))[0]) + print("Bad frame involved and will be processed!") + noframes -= len( + np.where(np.in1d(bad_frame_list, range(good_start, FD.end)))[0] + ) print("%s frames 
will be processed..." % (noframes)) - ring_masks = [np.array(ring_mask == i, dtype=np.int64) for i in np.unique(ring_mask)[1:]] + ring_masks = [ + np.array(ring_mask == i, dtype=np.int64) for i in np.unique(ring_mask)[1:] + ] qind, pixelist = roi.extract_label_indices(ring_mask) if norm is not None: norms = [ - norm[np.in1d(pixelist, extract_label_indices(np.array(ring_mask == i, dtype=np.int64))[1])] + norm[ + np.in1d( + pixelist, + extract_label_indices(np.array(ring_mask == i, dtype=np.int64))[1], + ) + ] for i in np.unique(ring_mask)[1:] ] inputs = range(len(ring_masks)) @@ -344,7 +371,7 @@ def __init__(self, num_levels, num_bufs, labels, cal_error=False): """YG. DEV Nov, 2016, Initialize class for the generator-based multi-tau for one time correlation - Jan 1, 2018, Add cal_error option to calculate signal to noise to one time correaltion + Jan 1, 2018, Add cal_error option to calculate signal to noise to one time correlation """ ( @@ -396,7 +423,10 @@ def __init__(self, num_levels, num_bufs, labels, cal_error=False): lev_len, ) if cal_error: - self.G_all = np.zeros((int((num_levels + 1) * num_bufs / 2), len(pixel_list)), dtype=np.float64) + self.G_all = np.zeros( + (int((num_levels + 1) * num_bufs / 2), len(pixel_list)), + dtype=np.float64, + ) # matrix for normalizing G into g2 self.past_intensity_all = np.zeros_like(self.G_all) # matrix for normalizing G into g2 @@ -586,7 +616,13 @@ def lazy_one_timep( # return results(g2, s.lag_steps[:g_max], s) if cal_error: # return g2, s.lag_steps[:g_max], s.G[:g_max],s.past_intensity[:g_max], s.future_intensity[:g_max] #, s - return (None, s.lag_steps, s.G_all, s.past_intensity_all, s.future_intensity_all) # , s ) + return ( + None, + s.lag_steps, + s.G_all, + s.past_intensity_all, + s.future_intensity_all, + ) # , s ) else: return g2, s.lag_steps[:g_max] # , s @@ -612,19 +648,31 @@ def cal_g2p( pass_FD(FD, i) if num_lev is None: num_lev = int(np.log(noframes / (num_buf - 1)) / np.log(2) + 1) + 1 - print("In this g2 
calculation, the buf and lev number are: %s--%s--" % (num_buf, num_lev)) + print( + "In this g2 calculation, the buf and lev number are: %s--%s--" + % (num_buf, num_lev) + ) if bad_frame_list is not None: if len(bad_frame_list) != 0: print("%s Bad frames involved and will be discarded!" % len(bad_frame_list)) - noframes -= len(np.where(np.in1d(bad_frame_list, range(good_start, FD.end)))[0]) + noframes -= len( + np.where(np.in1d(bad_frame_list, range(good_start, FD.end)))[0] + ) print("%s frames will be processed..." % (noframes - 1)) - ring_masks = [np.array(ring_mask == i, dtype=np.int64) for i in np.unique(ring_mask)[1:]] + ring_masks = [ + np.array(ring_mask == i, dtype=np.int64) for i in np.unique(ring_mask)[1:] + ] qind, pixelist = roi.extract_label_indices(ring_mask) noqs = len(np.unique(qind)) nopr = np.bincount(qind, minlength=(noqs + 1))[1:] if norm is not None: norms = [ - norm[np.in1d(pixelist, extract_label_indices(np.array(ring_mask == i, dtype=np.int64))[1])] + norm[ + np.in1d( + pixelist, + extract_label_indices(np.array(ring_mask == i, dtype=np.int64))[1], + ) + ] for i in np.unique(ring_mask)[1:] ] @@ -639,7 +687,17 @@ def cal_g2p( results[i] = apply_async( pool, lazy_one_timep, - (FD, num_lev, num_buf, ring_masks[i], internal_state, bad_frame_list, imgsum, norms[i], cal_error), + ( + FD, + num_lev, + num_buf, + ring_masks[i], + internal_state, + bad_frame_list, + imgsum, + norms[i], + cal_error, + ), ) else: # print ('for norm is None') @@ -647,7 +705,17 @@ def cal_g2p( results[i] = apply_async( pool, lazy_one_timep, - (FD, num_lev, num_buf, ring_masks[i], internal_state, bad_frame_list, imgsum, None, cal_error), + ( + FD, + num_lev, + num_buf, + ring_masks[i], + internal_state, + bad_frame_list, + imgsum, + None, + cal_error, + ), ) pool.close() print("Starting running the tasks...") @@ -703,8 +771,10 @@ def cal_g2p( g2[:g_max, i] = avgGi[:g_max] / (avgPi[:g_max] * avgFi[:g_max]) g2_err[:g_max, i] = np.sqrt( (1 / (avgFi[:g_max] * avgPi[:g_max])) 
** 2 * devGi[:g_max] ** 2 - + (avgGi[:g_max] / (avgFi[:g_max] ** 2 * avgPi[:g_max])) ** 2 * devFi[:g_max] ** 2 - + (avgGi[:g_max] / (avgFi[:g_max] * avgPi[:g_max] ** 2)) ** 2 * devPi[:g_max] ** 2 + + (avgGi[:g_max] / (avgFi[:g_max] ** 2 * avgPi[:g_max])) ** 2 + * devFi[:g_max] ** 2 + + (avgGi[:g_max] / (avgFi[:g_max] * avgPi[:g_max] ** 2)) ** 2 + * devPi[:g_max] ** 2 ) Gmax = max(g_max, Gmax) lag_stepsi = res[i][1] @@ -719,7 +789,14 @@ def cal_g2p( del res if cal_error: print("G2 with error bar calculation DONE!") - return g2[:Gmax, :], lag_steps_err[:Gmax], g2_err[:Gmax, :] / np.sqrt(nopr), g2_G, g2_P, g2_F + return ( + g2[:Gmax, :], + lag_steps_err[:Gmax], + g2_err[:Gmax, :] / np.sqrt(nopr), + g2_G, + g2_P, + g2_F, + ) else: print("G2 calculation DONE!") return g2, lag_steps @@ -781,7 +858,9 @@ def auto_two_Arrayp(data_pixel, rois, index=None): pool = Pool(processes=len(inputs)) results = {} for i in inputs: - results[i] = pool.apply_async(_get_two_time_for_one_q, [qlist[i], data_pixel_qis[i], nopr, noframes]) + results[i] = pool.apply_async( + _get_two_time_for_one_q, [qlist[i], data_pixel_qis[i], nopr, noframes] + ) pool.close() pool.join() res = np.array([results[k].get() for k in list(sorted(results.keys()))]) diff --git a/pyCHX/chx_crosscor.py b/pyCHX/chx_crosscor.py index c95d417..dd277b7 100644 --- a/pyCHX/chx_crosscor.py +++ b/pyCHX/chx_crosscor.py @@ -7,16 +7,14 @@ """ This module is for functions specific to spatial correlation in order to tackle the motion of speckles """ + from __future__ import absolute_import, division, print_function -from collections import namedtuple import numpy as np from scipy.signal import fftconvolve -from skbeam.core.roi import extract_label_indices # from __future__ import absolute_import, division, print_function -from skbeam.core.utils import multi_tau_lags # for a convenient status bar try: @@ -33,7 +31,12 @@ def tqdm(iterator): def get_cor_region(cor, cij, qid, fitw): """YG developed@CHX July/2019, Get a rectangle 
region of the cor class by giving center and width""" ceni = cor.centers[qid] - x1, x2, y1, y2 = max(0, ceni[0] - fitw), ceni[0] + fitw, max(0, ceni[1] - fitw), ceni[1] + fitw + x1, x2, y1, y2 = ( + max(0, ceni[0] - fitw), + ceni[0] + fitw, + max(0, ceni[1] - fitw), + ceni[1] + fitw, + ) return cij[qid][x1:x2, y1:y2] @@ -81,7 +84,9 @@ def direct_corss_cor(im1, im2): d1 = im1[j:, i:] d2 = im2[:-j, :-i] # print(i,j) - C[i + Nx, j + Ny] = np.sum(d1 * d2) / (np.average(d1) * np.average(d2) * d1.size) + C[i + Nx, j + Ny] = np.sum(d1 * d2) / ( + np.average(d1) * np.average(d2) * d1.size + ) return C.T @@ -303,7 +308,9 @@ def __call__(self, img1, img2=None, normalization=None, check_res=False): if self_correlation: ccorr[w] /= maskcor[w] * np.average(tmpimg[w]) ** 2 else: - ccorr[w] /= maskcor[w] * np.average(tmpimg[w]) * np.average(tmpimg2[w]) + ccorr[w] /= ( + maskcor[w] * np.average(tmpimg[w]) * np.average(tmpimg2[w]) + ) if check_res: if reg == 0: self.ckn = ccorr.copy() @@ -337,55 +344,15 @@ def _centered(img, sz): import threading -import warnings # from . 
import sigtools -import numpy as np from numpy import ( - allclose, - angle, - arange, - argsort, array, asarray, - atleast_1d, - atleast_2d, - dot, - exp, - expand_dims, - iscomplexobj, - isscalar, - mean, - ndarray, - newaxis, - ones, - pi, - poly, - polyadd, - polyder, - polydiv, - polymul, - polysub, - polyval, - prod, - r_, - ravel, - real_if_close, - reshape, - roots, - sort, - sum, - take, - transpose, - unique, - where, - zeros, - zeros_like, ) from numpy.fft import irfftn, rfftn from numpy.lib import NumpyVersion -from scipy import linalg -from scipy.fftpack import fft, fft2, fftfreq, fftn, ifft, ifft2, ifftn, ifftshift +from scipy.fftpack import fftn, ifftn # from ._arraytools import axis_slice, axis_reverse, odd_ext, even_ext, const_ext @@ -484,7 +451,9 @@ def fftconvolve_new(in1, in2, mode="full"): s1 = array(in1.shape) s2 = array(in2.shape) - complex_result = np.issubdtype(in1.dtype, np.complex) or np.issubdtype(in2.dtype, np.complex) + complex_result = np.issubdtype(in1.dtype, np.complex) or np.issubdtype( + in2.dtype, np.complex + ) shape = s1 + s2 - 1 if mode == "valid": @@ -518,7 +487,7 @@ def fftconvolve_new(in1, in2, mode="full"): elif mode == "valid": return _centered(ret, s1 - s2 + 1) else: - raise ValueError("Acceptable mode flags are 'valid'," " 'same', or 'full'.") + raise ValueError("Acceptable mode flags are 'valid', 'same', or 'full'.") def _cross_corr1(img1, img2=None): @@ -687,7 +656,10 @@ def __init__(self, shape, mask=None, normalization=None): self.positions.append(np.arange(maskcorr.shape[0]) - center[0]) elif mask.ndim == 2: self.positions.append( - [np.arange(maskcorr.shape[0]) - center[0], np.arange(maskcorr.shape[1]) - center[1]] + [ + np.arange(maskcorr.shape[0]) - center[0], + np.arange(maskcorr.shape[1]) - center[1], + ] ) if len(self.ids) == 1: @@ -747,9 +719,13 @@ def __call__(self, img1, img2=None, normalization=None, desc="cc"): # do symmetric averaging Icorr = _cross_corr1(tmpimg * self.submasks[reg], self.submasks[reg]) 
if self_correlation: - Icorr2 = _cross_corr1(self.submasks[reg], tmpimg * self.submasks[reg]) + Icorr2 = _cross_corr1( + self.submasks[reg], tmpimg * self.submasks[reg] + ) else: - Icorr2 = _cross_corr1(self.submasks[reg], tmpimg2 * self.submasks[reg]) + Icorr2 = _cross_corr1( + self.submasks[reg], tmpimg2 * self.submasks[reg] + ) # there is an extra condition that Icorr*Icorr2 != 0 w = np.where(np.abs(Icorr * Icorr2) > 0) # DO WE NEED THIS (use i,j). ccorr[w] *= self.maskcorrs[reg][w] / Icorr[w] / Icorr2[w] @@ -761,7 +737,11 @@ def __call__(self, img1, img2=None, normalization=None, desc="cc"): if self_correlation: ccorr[w] /= self.maskcorrs[reg][w] * np.average(tmpimg[w]) ** 2 else: - ccorr[w] /= self.maskcorrs[reg][w] * np.average(tmpimg[w]) * np.average(tmpimg2[w]) + ccorr[w] /= ( + self.maskcorrs[reg][w] + * np.average(tmpimg[w]) + * np.average(tmpimg2[w]) + ) ccorrs.append(ccorr) if len(ccorrs) == 1: @@ -773,9 +753,8 @@ def __call__(self, img1, img2=None, normalization=None, desc="cc"): ##for parallel from multiprocessing import Pool -import dill -from pyCHX.chx_compress import apply_async, map_async +from pyCHX.chx_compress import apply_async def run_para_ccorr_sym(ccorr_sym, FD, nstart=0, nend=None, imgsum=None, img_norm=None): @@ -805,7 +784,10 @@ def run_para_ccorr_sym(ccorr_sym, FD, nstart=0, nend=None, imgsum=None, img_norm results[i] = apply_async( pool, ccorr_sym, - (FD.rdframe(i) / (imgsum[i] * img_norm), FD.rdframe(1 + i) / (imgsum[i + 1] * img_norm)), + ( + FD.rdframe(i) / (imgsum[i] * img_norm), + FD.rdframe(1 + i) / (imgsum[i + 1] * img_norm), + ), ) pool.close() print("Starting running the tasks...") diff --git a/pyCHX/chx_generic_functions.py b/pyCHX/chx_generic_functions.py index ea7f6cd..d6e98e3 100644 --- a/pyCHX/chx_generic_functions.py +++ b/pyCHX/chx_generic_functions.py @@ -1,18 +1,16 @@ import copy from datetime import datetime from os import listdir -from shutil import copyfile import matplotlib.cm as mcm import numpy as np import PIL 
import pytz import scipy -from matplotlib import cm from modest_image import imshow from scipy.special import erf -from skbeam.core.utils import angle_grid, radial_grid, radius_to_twotheta, twotheta_to_q -from skimage.draw import disk, ellipse, line, line_aa, polygon +from skbeam.core.utils import angle_grid, radial_grid, radius_to_twotheta +from skimage.draw import disk, ellipse, polygon from skimage.filters import prewitt # from tqdm import * @@ -87,7 +85,9 @@ def generate_h5_list(inDir, filename): for fp_ in fp: if ".h5" in fp_: append_txtfile(filename=filename, data=np.array([FP_ + "/" + fp_])) - print("The full path of all the .h5 in %s has been saved in %s." % (inDir, filename)) + print( + "The full path of all the .h5 in %s has been saved in %s." % (inDir, filename) + ) print("You can use ./analysis/run_gui to visualize all the h5 file.") @@ -103,7 +103,7 @@ def fit_one_peak_curve(x, y, fit_range=None): fwhm: float, full width at half max intensity of the peak, 2*sigma fwhm_std:float, error bar of the full width at half max intensity of the peak xf: the x in the fit - out: the fitting class resutled from lmfit + out: the fitting class resulted from lmfit """ from lmfit.models import LinearModel, LorentzianModel @@ -213,10 +213,18 @@ def get_roi_mask_qval_qwid_by_shift( """YG Dev April 22, 2019 Get roi_mask, qval_dict, qwid_dict by shift the pre-defined big roi_mask""" center = setup_pargs["center"] roi_mask1 = shift_mask( - new_cen=center, new_mask=new_mask, old_cen=old_cen, old_roi_mask=old_roi_mask, limit_qnum=limit_qnum + new_cen=center, + new_mask=new_mask, + old_cen=old_cen, + old_roi_mask=old_roi_mask, + limit_qnum=limit_qnum, ) qval_dict_, qwid_dict_ = get_masked_qval_qwid_dict_using_Rmax( - new_mask=new_mask, setup_pargs=setup_pargs, old_roi_mask=old_roi_mask, old_cen=old_cen, geometry=geometry + new_mask=new_mask, + setup_pargs=setup_pargs, + old_roi_mask=old_roi_mask, + old_cen=old_cen, + geometry=geometry, ) w, w1 = 
get_zero_nozero_qind_from_roi_mask(roi_mask1, new_mask) # print(w,w1) @@ -240,12 +248,21 @@ def get_zero_nozero_qind_from_roi_mask(roi_mask, mask): return w, w1 -def get_masked_qval_qwid_dict_using_Rmax(new_mask, setup_pargs, old_roi_mask, old_cen, geometry): +def get_masked_qval_qwid_dict_using_Rmax( + new_mask, setup_pargs, old_roi_mask, old_cen, geometry +): """YG Dev April 22, 2019 Get qval_dict, qwid_dict by applying mask to roi_mask using a Rmax method""" cy, cx = setup_pargs["center"] my, mx = new_mask.shape Rmax = int( - np.ceil(max(np.hypot(cx, cy), np.hypot(cx - mx, cy - my), np.hypot(cx, cy - my), np.hypot(cx - mx, cy))) + np.ceil( + max( + np.hypot(cx, cy), + np.hypot(cx - mx, cy - my), + np.hypot(cx, cy - my), + np.hypot(cx - mx, cy), + ) + ) ) Fmask = np.zeros([Rmax * 2, Rmax * 2], dtype=int) Fmask[Rmax - cy : Rmax - cy + my, Rmax - cx : Rmax - cx + mx] = new_mask @@ -262,7 +279,9 @@ def get_masked_qval_qwid_dict_using_Rmax(new_mask, setup_pargs, old_roi_mask, ol "Ldet": setup_pargs["Ldet"], "lambda_": setup_pargs["lambda_"], } - qval_dict1, qwid_dict1 = get_masked_qval_qwid_dict(roi_mask1, Fmask, setup_pargs_, geometry) + qval_dict1, qwid_dict1 = get_masked_qval_qwid_dict( + roi_mask1, Fmask, setup_pargs_, geometry + ) # w = get_zero_qind_from_roi_mask(roi_mask1,Fmask) return qval_dict1, qwid_dict1 # ,w @@ -270,7 +289,9 @@ def get_masked_qval_qwid_dict_using_Rmax(new_mask, setup_pargs, old_roi_mask, ol def get_masked_qval_qwid_dict(roi_mask, mask, setup_pargs, geometry): """YG Dev April 22, 2019 Get qval_dict, qwid_dict by applying mask to roi_mask""" - qval_dict_, qwid_dict_ = get_qval_qwid_dict(roi_mask, setup_pargs, geometry=geometry) + qval_dict_, qwid_dict_ = get_qval_qwid_dict( + roi_mask, setup_pargs, geometry=geometry + ) w, w1 = get_zero_nozero_qind_from_roi_mask(roi_mask, mask) qval_dictx = {k: v for (k, v) in list(qval_dict_.items()) if k not in w} qwid_dictx = {k: v for (k, v) in list(qwid_dict_.items()) if k not in w} @@ -288,7 +309,7 @@ 
def get_qval_qwid_dict(roi_mask, setup_pargs, geometry="saxs"): Input: roi_mask: integer type 2D array setup_pargs: dict, should at least contains, center (direct beam center), dpix (in mm), - lamda_: in A-1, Ldet: in mm + lambda_: in A-1, Ldet: in mm e.g., {'Ldet': 1495.0, abs #essential 'center': [-4469, 363], #essential @@ -300,7 +321,7 @@ def get_qval_qwid_dict(roi_mask, setup_pargs, geometry="saxs"): 'uid': 'uid=b85dad'} geometry: support saxs for isotropic transmission SAXS ang_saxs for anisotropic transmission SAXS - flow_saxs for anisotropic transmission SAXS under flow (center symetric) + flow_saxs for anisotropic transmission SAXS under flow (center symmetric) Return: qval_dict: dict, key as q-number, val: q val @@ -449,7 +470,12 @@ def shift_mask(new_cen, new_mask, old_cen, old_roi_mask, limit_qnum=None): """ nsx, nsy = new_mask.shape down, up, left, right = new_cen[0], nsx - new_cen[0], new_cen[1], nsy - new_cen[1] - x1, x2, y1, y2 = [old_cen[0] - down, old_cen[0] + up, old_cen[1] - left, old_cen[1] + right] + x1, x2, y1, y2 = [ + old_cen[0] - down, + old_cen[0] + up, + old_cen[1] - left, + old_cen[1] + right, + ] nroi_mask_ = old_roi_mask[x1:x2, y1:y2] * new_mask nroi_mask = np.zeros_like(nroi_mask_) qind, pixelist = roi.extract_label_indices(nroi_mask_) @@ -588,12 +614,16 @@ def plot_q_g2fitpara_general( if geometry == "ang_saxs": title_short = "Angle= %.2f" % (short_ulabel[s_ind]) + r"$^\circ$" elif geometry == "gi_saxs": - title_short = r"$Q_z= $" + "%.4f" % (short_ulabel[s_ind]) + r"$\AA^{-1}$" + title_short = ( + r"$Q_z= $" + "%.4f" % (short_ulabel[s_ind]) + r"$\AA^{-1}$" + ) else: title_short = "" else: # qr if geometry == "ang_saxs" or geometry == "gi_saxs": - title_short = r"$Q_r= $" + "%.5f " % (short_ulabel[s_ind]) + r"$\AA^{-1}$" + title_short = ( + r"$Q_r= $" + "%.5f " % (short_ulabel[s_ind]) + r"$\AA^{-1}$" + ) else: title_short = "" # print(geometry) @@ -623,10 +653,32 @@ def plot_q_g2fitpara_general( ax2 = fig.add_subplot(4, 1, 2) ax3 = 
fig.add_subplot(4, 1, 3) ax4 = fig.add_subplot(4, 1, 4) - plot1D(x=qi, y=betai, m="o", ls="--", c="k", ax=ax1, legend=r"$\beta$", title="") - plot1D(x=qi, y=alphai, m="o", ls="--", c="r", ax=ax2, legend=r"$\alpha$", title="") - plot1D(x=qi, y=baselinei, m="o", ls="--", c="g", ax=ax3, legend=r"$baseline$", title="") - plot1D(x=qi, y=relaxation_ratei, m="o", c="b", ls="--", ax=ax4, legend=r"$\gamma$ $(s^{-1})$", title="") + plot1D( + x=qi, y=betai, m="o", ls="--", c="k", ax=ax1, legend=r"$\beta$", title="" + ) + plot1D( + x=qi, y=alphai, m="o", ls="--", c="r", ax=ax2, legend=r"$\alpha$", title="" + ) + plot1D( + x=qi, + y=baselinei, + m="o", + ls="--", + c="g", + ax=ax3, + legend=r"$baseline$", + title="", + ) + plot1D( + x=qi, + y=relaxation_ratei, + m="o", + c="b", + ls="--", + ax=ax4, + legend=r"$\gamma$ $(s^{-1})$", + title="", + ) ax4.set_ylabel(r"$\gamma$ $(s^{-1})$") ax4.set_xlabel(r"$q $ $(\AA)$", fontsize=16) @@ -757,12 +809,12 @@ def plot_xy_x2( **kwargs, ): """YG.@CHX 2019/10/ Plot x, y, x2, if have, will plot as twiny( same y, different x) - This funciton is primary for plot q-Iq + This function is primary for plot q-Iq Input: x: one-d array, x in one unit y: one-d array, - x2:one-d array, x in anoter unit + x2:one-d array, x in another unit pargs: dict, could include 'uid', 'path' loglog: if True, if plot x and y in log, by default plot in y-log save: if True, save the plot in the path defined in pargs @@ -815,7 +867,9 @@ def plot_xy_x2( fig.savefig(fp, dpi=fig.dpi) -def save_oavs_tifs(uid, data_dir, brightness_scale=1, scalebar_size=100, scale=1, threshold=0): +def save_oavs_tifs( + uid, data_dir, brightness_scale=1, scalebar_size=100, scale=1, threshold=0 +): """save oavs as png""" tifs = list(db[uid].data("OAV_image"))[0] try: @@ -832,14 +886,22 @@ def save_oavs_tifs(uid, data_dir, brightness_scale=1, scalebar_size=100, scale=1 detectors = sorted(get_detectors(h)) for d in range(len(detectors)): try: - oav_period = 
h["descriptors"][d]["configuration"]["OAV"]["data"]["OAV_cam_acquire_period"] - oav_expt = h["descriptors"][d]["configuration"]["OAV"]["data"]["OAV_cam_acquire_time"] + oav_period = h["descriptors"][d]["configuration"]["OAV"]["data"][ + "OAV_cam_acquire_period" + ] + oav_expt = h["descriptors"][d]["configuration"]["OAV"]["data"][ + "OAV_cam_acquire_time" + ] except: pass oav_times = [] for i in range(len(oavs)): oav_times.append(oav_expt + i * oav_period) - fig = plt.subplots(int(np.ceil(len(oavs) / 3)), 3, figsize=(3 * 5.08, int(np.ceil(len(oavs) / 3)) * 4)) + fig = plt.subplots( + int(np.ceil(len(oavs) / 3)), + 3, + figsize=(3 * 5.08, int(np.ceil(len(oavs) / 3)) * 4), + ) for m in range(len(oavs)): plt.subplot(int(np.ceil(len(oavs) / 3)), 3, m + 1) # plt.subplots(figsize=(5.2,4)) @@ -855,11 +917,21 @@ def save_oavs_tifs(uid, data_dir, brightness_scale=1, scalebar_size=100, scale=1 plt.imshow(rgb_cont_img, interpolation="none", resample=True, cmap="gray") plt.axis("equal") - cross = [685, 440, 50] # definintion of direct beam: x, y, size - plt.plot([cross[0] - cross[2] / 2, cross[0] + cross[2] / 2], [cross[1], cross[1]], "r-") - plt.plot([cross[0], cross[0]], [cross[1] - cross[2] / 2, cross[1] + cross[2] / 2], "r-") + cross = [685, 440, 50] # definition of direct beam: x, y, size + plt.plot( + [cross[0] - cross[2] / 2, cross[0] + cross[2] / 2], + [cross[1], cross[1]], + "r-", + ) + plt.plot( + [cross[0], cross[0]], + [cross[1] - cross[2] / 2, cross[1] + cross[2] / 2], + "r-", + ) if pixel_scalebar != None: - plt.plot([1100, 1100 + pixel_scalebar], [150, 150], "r-", Linewidth=5) # scale bar. + plt.plot( + [1100, 1100 + pixel_scalebar], [150, 150], "r-", Linewidth=5 + ) # scale bar. 
plt.text(1000, 50, text_string, fontsize=14, color="r") plt.text(600, 50, str(oav_times[m])[:5] + " [s]", fontsize=14, color="r") plt.axis("off") @@ -902,10 +974,16 @@ def get_current_time(): def evalue_array(array, verbose=True): """Y.G., Dev Nov 1, 2018 Get min, max, avg, std of an array""" - _min, _max, avg, std = np.min(array), np.max(array), np.average(array), np.std(array) + _min, _max, avg, std = ( + np.min(array), + np.max(array), + np.average(array), + np.std(array), + ) if verbose: print( - "The min, max, avg, std of this array are: %s %s %s %s, respectively." % (_min, _max, avg, std) + "The min, max, avg, std of this array are: %s %s %s %s, respectively." + % (_min, _max, avg, std) ) return _min, _max, avg, std @@ -922,7 +1000,10 @@ def find_good_xpcs_uids(fuids, Nlim=100, det=["4m", "1m", "500"]): """ guids = [] for i, uid in enumerate(fuids): - if db[uid]["start"]["plan_name"] == "count" or db[uid]["start"]["plan_name"] == "manual_count": + if ( + db[uid]["start"]["plan_name"] == "count" + or db[uid]["start"]["plan_name"] == "manual_count" + ): head = db[uid]["start"] for dec in head["detectors"]: for dt in det: @@ -956,7 +1037,9 @@ def create_fullImg_with_box( roi_mask = np.zeros(shape, dtype=np.int32) for i in range(box_nx): for j in range(box_ny): - roi_mask[i * Wrow : (i + 1) * Wrow, j * Wcol : (j + 1) * Wcol] = i * box_ny + j + 1 + roi_mask[i * Wrow : (i + 1) * Wrow, j * Wcol : (j + 1) * Wcol] = ( + i * box_ny + j + 1 + ) # roi_mask *= mask return roi_mask @@ -1005,7 +1088,11 @@ def lin2log_g2(lin_tau, lin_g2, num_points=False): # re-sample correlation function: log_g2 = [] for i in range(log_tau.size - 1): - y = [i, log_tau[i] - (log_tau[i + 1] - log_tau[i]) / 2, log_tau[i] + (log_tau[i + 1] - log_tau[i]) / 2] + y = [ + i, + log_tau[i] - (log_tau[i + 1] - log_tau[i]) / 2, + log_tau[i] + (log_tau[i + 1] - log_tau[i]) / 2, + ] # x=lin_tau[lin_tau>y[1]] x1 = lin_tau > y[1] x2 = lin_tau < y[2] @@ -1017,7 +1104,11 @@ def lin2log_g2(lin_tau, lin_g2, 
num_points=False): log_g2.append(np.interp(log_tau[i], lin_tau, lin_g2)) if i == log_tau.size - 2: # print(log_tau[i+1]) - y = [i + 1, log_tau[i + 1] - (log_tau[i + 1] - log_tau[i]) / 2, log_tau[i + 1]] + y = [ + i + 1, + log_tau[i + 1] - (log_tau[i + 1] - log_tau[i]) / 2, + log_tau[i + 1], + ] x1 = lin_tau > y[1] x2 = lin_tau < y[2] x = x1 * x2 @@ -1051,7 +1142,10 @@ def copy_data(old_path, new_path="/tmp_data/data/"): for fp in tqdm(fps): if not os.path.exists(new_path + os.path.basename(fp)): shutil.copy(fp, new_path) - print("The files %s are copied: %s." % (old_path[:-10] + "*", new_path + os.path.basename(fp))) + print( + "The files %s are copied: %s." + % (old_path[:-10] + "*", new_path + os.path.basename(fp)) + ) def delete_data(old_path, new_path="/tmp_data/data/"): @@ -1061,7 +1155,6 @@ def delete_data(old_path, new_path="/tmp_data/data/"): new_path: the new path """ import glob - import shutil # old_path = sud[2][0] # new_path = '/tmp_data/data/' @@ -1073,12 +1166,20 @@ def delete_data(old_path, new_path="/tmp_data/data/"): def show_tif_series( - tif_series, Nx=None, center=None, w=50, vmin=None, vmax=None, cmap=cmap_vge_hdr, logs=False, figsize=[10, 16] + tif_series, + Nx=None, + center=None, + w=50, + vmin=None, + vmax=None, + cmap=cmap_vge_hdr, + logs=False, + figsize=[10, 16], ): """ tif_series: list of 2D tiff images - Nx: the number in the row for dispalying - center: the center of iamge (or direct beam pixel) + Nx: the number in the row for displaying + center: the center of image (or direct beam pixel) w: the ROI half size in pixel vmin: the min intensity value for plot vmax: if None, will be max intensity value of the ROI @@ -1121,9 +1222,6 @@ def show_tif_series( return fig, ax -from scipy.special import erf - - def ps(y, shift=0.5, replot=True, logplot="off", x=None): """ Dev 16, 2018 @@ -1158,7 +1256,10 @@ def is_positive(num): for i in range(len(y)): current_positive = is_positive(ym[i]) if current_positive != positive: - 
list_of_roots.append(x[i - 1] + (x[i] - x[i - 1]) / (abs(ym[i]) + abs(ym[i - 1])) * abs(ym[i - 1])) + list_of_roots.append( + x[i - 1] + + (x[i] - x[i - 1]) / (abs(ym[i]) + abs(ym[i - 1])) * abs(ym[i - 1]) + ) positive = not positive if len(list_of_roots) >= 2: FWHM = abs(list_of_roots[-1] - list_of_roots[0]) @@ -1275,13 +1376,19 @@ def create_seg_ring(ring_edges, ang_edges, mask, setup_pargs): flow_geometry=False, ) - roi_mask, good_ind = combine_two_roi_mask(roi_mask_qr, roi_mask_ang, pixel_num_thres=100) - qval_dict_ = get_qval_dict(qr_center=qr, qz_center=ang_center, one_qz_multi_qr=False) + roi_mask, good_ind = combine_two_roi_mask( + roi_mask_qr, roi_mask_ang, pixel_num_thres=100 + ) + qval_dict_ = get_qval_dict( + qr_center=qr, qz_center=ang_center, one_qz_multi_qr=False + ) qval_dict = {i: qval_dict_[k] for (i, k) in enumerate(good_ind)} return roi_mask, qval_dict -def find_bad_pixels_FD(bad_frame_list, FD, img_shape=[514, 1030], threshold=15, show_progress=True): +def find_bad_pixels_FD( + bad_frame_list, FD, img_shape=[514, 1030], threshold=15, show_progress=True +): """Designed to find bad pixel list in 500K threshold: the max intensity in 5K """ @@ -1305,7 +1412,7 @@ def find_bad_pixels_FD(bad_frame_list, FD, img_shape=[514, 1030], threshold=15, def get_q_iq_using_dynamic_mask(FD, mask, setup_pargs, bin_number=1, threshold=15): """DEV by Yugang@CHX, June 6, 2019 Get circular average of a time series using a dynamics mask, which pixel values are defined as - zeors if above a threshold. + zeros if above a threshold. 
Return an averaged q(pix)-Iq-q(A-1) of the whole time series using bin frames with bin_number Input: FD: the multifile handler for the time series @@ -1315,7 +1422,7 @@ def get_q_iq_using_dynamic_mask(FD, mask, setup_pargs, bin_number=1, threshold=1 'dpix', 'Ldet','lambda_', 'center' bin_number: bin number of the frame threshold: define the dynamics mask, which pixel values are defined as - zeors if above this threshold + zeros if above this threshold Output: qp_saxs: q in pixel iq_saxs: intenstity @@ -1327,22 +1434,37 @@ def get_q_iq_using_dynamic_mask(FD, mask, setup_pargs, bin_number=1, threshold=1 Nimg_ = FD.end - FD.beg # Nimg_ = 100 Nimg = Nimg_ // bin_number - time_edge = np.array(create_time_slice(N=Nimg_, slice_num=Nimg, slice_width=bin_number)) + beg + time_edge = ( + np.array(create_time_slice(N=Nimg_, slice_num=Nimg, slice_width=bin_number)) + + beg + ) for n in tqdm(range(Nimg)): t1, t2 = time_edge[n] # print(t1,t2) if bin_number == 1: avg_imgi = FD.rdframe(t1) else: - avg_imgi = get_avg_imgc(FD, beg=t1, end=t2, sampling=1, plot_=False, show_progress=False) + avg_imgi = get_avg_imgc( + FD, beg=t1, end=t2, sampling=1, plot_=False, show_progress=False + ) badpi = find_bad_pixels_FD( - np.arange(t1, t2), FD, img_shape=avg_imgi.shape, threshold=threshold, show_progress=False + np.arange(t1, t2), + FD, + img_shape=avg_imgi.shape, + threshold=threshold, + show_progress=False, ) img = avg_imgi * mask * badpi - qp_saxsi, iq_saxsi, q_saxsi = get_circular_average(img, mask * badpi, save=False, pargs=setup_pargs) + qp_saxsi, iq_saxsi, q_saxsi = get_circular_average( + img, mask * badpi, save=False, pargs=setup_pargs + ) # print( img.max()) if t1 == FD.beg: - qp_saxs, iq_saxs, q_saxs = np.zeros_like(qp_saxsi), np.zeros_like(iq_saxsi), np.zeros_like(q_saxsi) + qp_saxs, iq_saxs, q_saxs = ( + np.zeros_like(qp_saxsi), + np.zeros_like(iq_saxsi), + np.zeros_like(q_saxsi), + ) qp_saxs += qp_saxsi iq_saxs += iq_saxsi q_saxs += q_saxsi @@ -1388,10 +1510,10 @@ def 
get_img_from_iq(qp, iq, img_shape, center): def average_array_withNan(array, axis=0, mask=None): """YG. Jan 23, 2018 - Average array invovling np.nan along axis + Average array involving np.nan along axis Input: - array: ND array, actually should be oneD or twoD at this stage..TODOLIST for ND + array: AND array, actually should be oneD or twoD at this stage..TODOLIST for AND axis: the average axis mask: bool, same shape as array, if None, will mask all the nan values Output: @@ -1414,10 +1536,10 @@ def average_array_withNan(array, axis=0, mask=None): def deviation_array_withNan(array, axis=0, mask=None): """YG. Jan 23, 2018 - Get the deviation of array invovling np.nan along axis + Get the deviation of array involving np.nan along axis Input: - array: ND array + array: AND array axis: the average axis mask: bool, same shape as array, if None, will mask all the nan values Output: @@ -1500,10 +1622,14 @@ def get_echos(dat_arr, min_distance=10): """ from skimage.feature import peak_local_max - max_ind = peak_local_max(dat_arr, min_distance) # !!! careful, skimage function reverses the order (wtf?) + max_ind = peak_local_max( + dat_arr, min_distance + ) # !!! careful, skimage function reverses the order (wtf?) 
min_ind = [] for i in range(len(max_ind[:-1])): - min_ind.append(max_ind[i + 1][0] + np.argmin(dat_arr[max_ind[i + 1][0] : max_ind[i][0]])) + min_ind.append( + max_ind[i + 1][0] + np.argmin(dat_arr[max_ind[i + 1][0] : max_ind[i][0]]) + ) # unfortunately, skimage function fu$$s up the format: max_ind is an array of a list of lists...fix this: mmax_ind = [] for l in max_ind: @@ -1516,10 +1642,10 @@ def pad_length(arr, pad_val=np.nan): """ arr: 2D matrix pad_val: values being padded - adds pad_val to each row, to make the length of each row equal to the lenght of the longest row of the original matrix + adds pad_val to each row, to make the length of each row equal to the length of the longest row of the original matrix -> used to convert python generic data object to HDF5 native format function fixes python bug in padding (np.pad) integer array with np.nan - update June 2023: remove use of np.shape and np.size that doesn't work (anymore?) on arrays with inhomogenous size + update June 2023: remove use of np.shape and np.size that doesn't work (anymore?) on arrays with inhomogeneous size by LW 12/30/2017 """ max_len = [] @@ -1527,7 +1653,12 @@ def pad_length(arr, pad_val=np.nan): max_len.append([len(arr[i])]) max_len = np.max(max_len) for l in range(len(arr)): - arr[l] = np.pad(arr[l] * 1.0, (0, max_len - np.size(arr[l])), mode="constant", constant_values=pad_val) + arr[l] = np.pad( + arr[l] * 1.0, + (0, max_len - np.size(arr[l])), + mode="constant", + constant_values=pad_val, + ) return arr @@ -1552,8 +1683,8 @@ def ls_dir(inDir, have_list=[], exclude_list=[]): """Y.G. 
Aug 1, 2019 List all filenames in a filefolder inDir: fullpath of the inDir - have_string: only retrun filename containing the string - exclude_string: only retrun filename not containing the string + have_string: only return filename containing the string + exclude_string: only return filename not containing the string """ from os import listdir @@ -1579,7 +1710,7 @@ def ls_dir2(inDir, string=None): """Y.G. Nov 1, 2017 List all filenames in a filefolder (not include hidden files and subfolders) inDir: fullpath of the inDir - string: if not None, only retrun filename containing the string + string: if not None, only return filename containing the string """ from os import listdir from os.path import isfile, join @@ -1587,7 +1718,9 @@ def ls_dir2(inDir, string=None): if string == None: tifs = np.array([f for f in listdir(inDir) if isfile(join(inDir, f))]) else: - tifs = np.array([f for f in listdir(inDir) if (isfile(join(inDir, f))) & (string in f)]) + tifs = np.array( + [f for f in listdir(inDir) if (isfile(join(inDir, f))) & (string in f)] + ) return tifs @@ -1625,7 +1758,17 @@ def re_filename_dir(old_pattern, new_pattern, inDir, verbose=True): re_filename(old_filename, new_filename, inDir, verbose=verbose) -def get_roi_nr(qdict, q, phi, q_nr=True, phi_nr=False, q_thresh=0, p_thresh=0, silent=True, qprecision=5): +def get_roi_nr( + qdict, + q, + phi, + q_nr=True, + phi_nr=False, + q_thresh=0, + p_thresh=0, + silent=True, + qprecision=5, +): """ function to return roi number from qval_dict, corresponding Q and phi, lists (sets) of all available Qs and phis [roi_nr,Q,phi,Q_list,phi_list]=get_roi_nr(..) 
@@ -1660,13 +1803,17 @@ def get_roi_nr(qdict, q, phi, q_nr=True, phi_nr=False, q_thresh=0, p_thresh=0, s qindices = [i for i, x in enumerate(qs) if np.abs(x - qinterest) < q_thresh] else: qinterest = q - qindices = [i for i, x in enumerate(qs) if np.abs(x - qinterest) < q_thresh] # new + qindices = [ + i for i, x in enumerate(qs) if np.abs(x - qinterest) < q_thresh + ] # new if phi_nr: phiinterest = phislist[phi] phiindices = [i for i, x in enumerate(phis) if x == phiinterest] else: phiinterest = phi - phiindices = [i for i, x in enumerate(phis) if np.abs(x - phiinterest) < p_thresh] # new + phiindices = [ + i for i, x in enumerate(phis) if np.abs(x - phiinterest) < p_thresh + ] # new ret_list = [ list(set(qindices).intersection(phiindices))[0], qinterest, @@ -1679,7 +1826,14 @@ def get_roi_nr(qdict, q, phi, q_nr=True, phi_nr=False, q_thresh=0, p_thresh=0, s print(qslist) print("list of available phis:") print(phislist) - print("Roi number for Q= " + str(ret_list[1]) + " and phi= " + str(ret_list[2]) + ": " + str(ret_list[0])) + print( + "Roi number for Q= " + + str(ret_list[1]) + + " and phi= " + + str(ret_list[2]) + + ": " + + str(ret_list[0]) + ) return ret_list @@ -1690,7 +1844,7 @@ def get_fit_by_two_linear( mid_xpoint2=None, xrange=None, ): - """YG Octo 16,2017 Fit a curve with two linear func, the curve is splitted by mid_xpoint, + """YG Octo 16,2017 Fit a curve with two linear func, the curve is split by mid_xpoint, namely, fit the curve in two regions defined by (xmin,mid_xpoint ) and (mid_xpoint2, xmax) Input: x: 1D np.array @@ -1700,9 +1854,9 @@ def get_fit_by_two_linear( Return: D1, gmfit1, D2, gmfit2 : fit parameter (slope, background) of linear fit1 - convinent fit class, gmfit1(x) gives yvale + convenient fit class, gmfit1(x) gives yvale fit parameter (slope, background) of linear fit2 - convinent fit class, gmfit2(x) gives yvale + convenient fit class, gmfit2(x) gives yvale """ if xrange == None: @@ -1734,7 +1888,9 @@ def get_curve_turning_points( 
"""YG Octo 16,2017 Get a turning point of a curve by doing a two-linear fit """ - D1, gmfit1, D2, gmfit2 = get_fit_by_two_linear(x, y, mid_xpoint1, mid_xpoint2, xrange) + D1, gmfit1, D2, gmfit2 = get_fit_by_two_linear( + x, y, mid_xpoint1, mid_xpoint2, xrange + ) return get_cross_point(x, gmfit1, gmfit2) @@ -1742,7 +1898,9 @@ def plot_fit_two_linear_fit(x, y, gmfit1, gmfit2, ax=None): """YG Octo 16,2017 Plot data with two fitted linear func""" if ax == None: fig, ax = plt.subplots() - plot1D(x=x, y=y, ax=ax, c="k", legend="data", m="o", ls="") # logx=True, logy=True ) + plot1D( + x=x, y=y, ax=ax, c="k", legend="data", m="o", ls="" + ) # logx=True, logy=True ) plot1D(x=x, y=gmfit1(x), ax=ax, c="r", m="", ls="-", legend="fit1") plot1D(x=x, y=gmfit2(x), ax=ax, c="b", m="", ls="-", legend="fit2") return ax @@ -1754,7 +1912,10 @@ def linear_fit(x, y, xrange=None): """ if xrange != None: xmin, xmax = xrange - x1, x2 = find_index(x, xmin, tolerance=None), find_index(x, xmax, tolerance=None) + x1, x2 = ( + find_index(x, xmin, tolerance=None), + find_index(x, xmax, tolerance=None), + ) x_ = x[x1:x2] y_ = y[x1:x2] else: @@ -1866,22 +2027,32 @@ def sgolay2d(z, window_size, order, derivative=None): Z = np.zeros((new_shape)) # top band band = z[0, :] - Z[:half_size, half_size:-half_size] = band - np.abs(np.flipud(z[1 : half_size + 1, :]) - band) + Z[:half_size, half_size:-half_size] = band - np.abs( + np.flipud(z[1 : half_size + 1, :]) - band + ) # bottom band band = z[-1, :] - Z[-half_size:, half_size:-half_size] = band + np.abs(np.flipud(z[-half_size - 1 : -1, :]) - band) + Z[-half_size:, half_size:-half_size] = band + np.abs( + np.flipud(z[-half_size - 1 : -1, :]) - band + ) # left band band = np.tile(z[:, 0].reshape(-1, 1), [1, half_size]) - Z[half_size:-half_size, :half_size] = band - np.abs(np.fliplr(z[:, 1 : half_size + 1]) - band) + Z[half_size:-half_size, :half_size] = band - np.abs( + np.fliplr(z[:, 1 : half_size + 1]) - band + ) # right band band = np.tile(z[:, 
-1].reshape(-1, 1), [1, half_size]) - Z[half_size:-half_size, -half_size:] = band + np.abs(np.fliplr(z[:, -half_size - 1 : -1]) - band) + Z[half_size:-half_size, -half_size:] = band + np.abs( + np.fliplr(z[:, -half_size - 1 : -1]) - band + ) # central band Z[half_size:-half_size, half_size:-half_size] = z # top left corner band = z[0, 0] - Z[:half_size, :half_size] = band - np.abs(np.flipud(np.fliplr(z[1 : half_size + 1, 1 : half_size + 1])) - band) + Z[:half_size, :half_size] = band - np.abs( + np.flipud(np.fliplr(z[1 : half_size + 1, 1 : half_size + 1])) - band + ) # bottom right corner band = z[-1, -1] Z[-half_size:, -half_size:] = band + np.abs( @@ -1890,10 +2061,14 @@ def sgolay2d(z, window_size, order, derivative=None): # top right corner band = Z[half_size, -half_size:] - Z[:half_size, -half_size:] = band - np.abs(np.flipud(Z[half_size + 1 : 2 * half_size + 1, -half_size:]) - band) + Z[:half_size, -half_size:] = band - np.abs( + np.flipud(Z[half_size + 1 : 2 * half_size + 1, -half_size:]) - band + ) # bottom left corner band = Z[-half_size:, half_size].reshape(-1, 1) - Z[-half_size:, :half_size] = band - np.abs(np.fliplr(Z[-half_size:, half_size + 1 : 2 * half_size + 1]) - band) + Z[-half_size:, :half_size] = band - np.abs( + np.fliplr(Z[-half_size:, half_size + 1 : 2 * half_size + 1]) - band + ) # solve system and convolve if derivative == None: @@ -1908,7 +2083,9 @@ def sgolay2d(z, window_size, order, derivative=None): elif derivative == "both": c = np.linalg.pinv(A)[1].reshape((window_size, -1)) r = np.linalg.pinv(A)[2].reshape((window_size, -1)) - return scipy.signal.fftconvolve(Z, -r, mode="valid"), scipy.signal.fftconvolve(Z, -c, mode="valid") + return scipy.signal.fftconvolve(Z, -r, mode="valid"), scipy.signal.fftconvolve( + Z, -c, mode="valid" + ) def load_filelines(fullpath): @@ -1943,7 +2120,7 @@ def extract_data_from_file( good_line_pattern: str, data will be extract below this good_line_pattern Or giving start_row: int good_cols: list of integer, 
good index of cols - lables: the label of the good_cols + labels: the label of the good_cols #save: False, if True will save the data into a csv file with filename appending csv ?? Return: a pds.dataframe @@ -2027,7 +2204,7 @@ def get_print_uids(start_time, stop_time, return_all_info=False): def get_last_uids(n=-1): """YG Sep 26, 2017 - A Convinient function to copy uid to jupyter for analysis""" + A Convenient function to copy uid to jupyter for analysis""" uid = db[n]["start"]["uid"][:8] sid = db[n]["start"]["scan_id"] m = db[n]["start"]["Measurement"] @@ -2042,14 +2219,16 @@ def get_base_all_filenames(inDir, base_filename_cut_length=-7): base_filename_cut_length: to which length the base name is unique Output: dict: keys, base filename - vales, all realted filename + vales, all related filename """ from os import listdir from os.path import isfile, join tifs = np.array([f for f in listdir(inDir) if isfile(join(inDir, f))]) tifsc = list(tifs.copy()) - utifs = np.sort(np.unique(np.array([f[:base_filename_cut_length] for f in tifs])))[::-1] + utifs = np.sort(np.unique(np.array([f[:base_filename_cut_length] for f in tifs])))[ + ::-1 + ] files = {} for uf in utifs: files[uf] = [] @@ -2190,7 +2369,9 @@ def get_mass_center_one_roi(FD, roi_mask, roi_ind): m = roi_mask == roi_ind cx, cy = np.zeros(int((FD.end - FD.beg) / 1)), np.zeros(int((FD.end - FD.beg) / 1)) n = 0 - for i in tqdm(range(FD.beg, FD.end, 1), desc="Get mass center of one ROI of each frame"): + for i in tqdm( + range(FD.beg, FD.end, 1), desc="Get mass center of one ROI of each frame" + ): img = FD.rdframe(i) * m c = scipy.ndimage.measurements.center_of_mass(img) cx[n], cy[n] = int(c[0]), int(c[1]) @@ -2324,7 +2505,9 @@ def create_chip_edges_mask(det="1M"): return mask -def create_ellipse_donut(cx, cy, wx_inner, wy_inner, wx_outer, wy_outer, roi_mask, gap=0): +def create_ellipse_donut( + cx, cy, wx_inner, wy_inner, wx_outer, wy_outer, roi_mask, gap=0 +): Nmax = np.max(np.unique(roi_mask)) rr1, cc1 = 
ellipse(cy, cx, wy_inner, wx_inner) rr2, cc2 = ellipse(cy, cx, wy_inner + gap, wx_inner + gap) @@ -2390,10 +2573,10 @@ def get_fra_num_by_dose(exp_dose, exp_time, att=1, dead_time=2): """ Calculate the frame number to be correlated by giving a X-ray exposure dose - Paramters: + Parameters: exp_dose: a list, the exposed dose, e.g., in unit of exp_time(ms)*N(fram num)*att( attenuation) exp_time: float, the exposure time for a xpcs time sereies - dead_time: dead time for the fast shutter reponse time, CHX = 2ms + dead_time: dead time for the fast shutter response time, CHX = 2ms Return: noframes: the frame number to be correlated, exp_dose/( exp_time + dead_time ) e.g., @@ -2410,7 +2593,7 @@ def get_multi_tau_lag_steps(fra_max, num_bufs=8): """ Get taus in log steps ( a multi-taus defined taus ) for a time series with max frame number as fra_max Parameters: - fra_max: integer, the maximun frame number + fra_max: integer, the maximum frame number buf_num (default=8), Return: taus_in_log, a list @@ -2424,12 +2607,14 @@ def get_multi_tau_lag_steps(fra_max, num_bufs=8): return lag_steps[lag_steps < fra_max] -def get_series_g2_taus(fra_max_list, acq_time=1, max_fra_num=None, log_taus=True, num_bufs=8): +def get_series_g2_taus( + fra_max_list, acq_time=1, max_fra_num=None, log_taus=True, num_bufs=8 +): """ Get taus for dose dependent analysis Parameters: fra_max_list: a list, a lsit of largest available frame number - acq_time: acquistion time for each frame + acq_time: acquisition time for each frame log_taus: if true, will use the multi-tau defined taus bu using buf_num (default=8), otherwise, use deltau =1 Return: @@ -2452,8 +2637,8 @@ def get_series_g2_taus(fra_max_list, acq_time=1, max_fra_num=None, log_taus=True if n > L: warnings.warn( "Warning: the dose value is too large, and please" - "check the maxium dose in this data set and give a smaller dose value." - "We will use the maxium dose of the data." 
+ "check the maximum dose in this data set and give a smaller dose value." + "We will use the maximum dose of the data." ) n = L if log_taus: @@ -2464,11 +2649,13 @@ def get_series_g2_taus(fra_max_list, acq_time=1, max_fra_num=None, log_taus=True return tausd -def check_lost_metadata(md, Nimg=None, inc_x0=None, inc_y0=None, pixelsize=7.5 * 10 * (-5)): +def check_lost_metadata( + md, Nimg=None, inc_x0=None, inc_y0=None, pixelsize=7.5 * 10 * (-5) +): """Y.G. Dec 31, 2016, check lost metadata Parameter: - md: dict, meta data dictionay + md: dict, meta data dictionary Nimg: number of frames for this uid metadata inc_x0/y0: incident beam center x0/y0, if None, will over-write the md['beam_center_x/y'] pixelsize: if md don't have ['x_pixel_size'], the pixelsize will add it @@ -2516,11 +2703,20 @@ def check_lost_metadata(md, Nimg=None, inc_x0=None, inc_y0=None, pixelsize=7.5 * timeperframe = acquisition_period if inc_x0 != None: mdn["beam_center_x"] = inc_y0 - print("Beam_center_x has been changed to %s. (no change in raw metadata): " % inc_y0) + print( + "Beam_center_x has been changed to %s. (no change in raw metadata): " + % inc_y0 + ) if inc_y0 != None: mdn["beam_center_y"] = inc_x0 - print("Beam_center_y has been changed to %s. (no change in raw metadata): " % inc_x0) - center = [int(mdn["beam_center_x"]), int(mdn["beam_center_y"])] # beam center [y,x] for python image + print( + "Beam_center_y has been changed to %s. 
(no change in raw metadata): " + % inc_x0 + ) + center = [ + int(mdn["beam_center_x"]), + int(mdn["beam_center_y"]), + ] # beam center [y,x] for python image center = [center[1], center[0]] return dpix, lambda_, Ldet, exposuretime, timeperframe, center @@ -2571,7 +2767,13 @@ def combine_images(filenames, outputfile, outsize=(2000, 2400)): print("The combined image is saved as: %s" % outputfile) -def get_qval_dict(qr_center, qz_center=None, qval_dict=None, multi_qr_for_one_qz=True, one_qz_multi_qr=True): +def get_qval_dict( + qr_center, + qz_center=None, + qval_dict=None, + multi_qr_for_one_qz=True, + one_qz_multi_qr=True, +): """Y.G. Dec 27, 2016 Map the roi label array with qr or (qr,qz) or (q//, q|-) values Parameters: @@ -2661,11 +2863,11 @@ def check_bad_uids(uids, mask, img_choice_N=10, bad_uids_index=None): bad_uids_index: a list of known bad uid list, default is None Return: guids: list, good uids - buids, list, bad uids + builds, list, bad uids """ import random - buids = [] + builds = [] guids = list(uids) # print( guids ) if bad_uids_index == None: @@ -2679,20 +2881,23 @@ def check_bad_uids(uids, mask, img_choice_N=10, bad_uids_index=None): imgsa = apply_mask(imgs, mask) avg_img = get_avg_img(imgsa, img_samp_index, plot_=False, uid=uid) if avg_img.max() == 0: - buids.append(uid) + builds.append(uid) guids.pop(list(np.where(np.array(guids) == uid)[0])[0]) print("The bad uid is: %s" % uid) else: guids.pop(list(np.where(np.array(guids) == uid)[0])[0]) - buids.append(uid) + builds.append(uid) print("The bad uid is: %s" % uid) - print("The total and bad uids number are %s and %s, repsectively." % (len(uids), len(buids))) - return guids, buids + print( + "The total and bad uids number are %s and %s, respectively." + % (len(uids), len(builds)) + ) + return guids, builds def find_uids(start_time, stop_time): """Y.G. 
Dec 22, 2016 - A wrap funciton to find uids by giving start and end time + A wrap function to find uids by giving start and end time Return: sids: list, scan id uids: list, uid with 8 character length @@ -2772,7 +2977,14 @@ def check_bad_data_points( fig = plt.figure() ax = fig.add_subplot(2, 1, 1) plot1D(d_, ax=ax, color="k", legend="data", legend_size=legend_size) - plot1D(pfit, ax=ax, color="b", legend="ploy-fit", title="Find Bad Points", legend_size=legend_size) + plot1D( + pfit, + ax=ax, + color="b", + legend="ploy-fit", + title="Find Bad Points", + legend_size=legend_size, + ) ax2 = fig.add_subplot(2, 1, 2) plot1D( @@ -2863,7 +3075,14 @@ def get_bad_frame_list( fig = plt.figure() ax = fig.add_subplot(2, 1, 1) plot1D(imgsum_, ax=ax, color="k", legend="data", legend_size=legend_size) - plot1D(pfit, ax=ax, color="b", legend="ploy-fit", title=uid + "_imgsum", legend_size=legend_size) + plot1D( + pfit, + ax=ax, + color="b", + legend="ploy-fit", + title=uid + "_imgsum", + legend_size=legend_size, + ) ax2 = fig.add_subplot(2, 1, 2) plot1D( @@ -2904,7 +3123,9 @@ def get_bad_frame_list( fp = path + "%s" % (uid) + "_imgsum_analysis" + ".png" plt.savefig(fp, dpi=fig.dpi) - bd2 = list(np.where(np.abs(data - data.mean()) > scale * data.std())[0] + good_start) + bd2 = list( + np.where(np.abs(data - data.mean()) > scale * data.std())[0] + good_start + ) if return_ylim: return np.array(bd1 + bd2 + bd3), ymin, ymax @@ -2980,7 +3201,7 @@ def get_meta_data(uid, default_dec="eiger", *argv, **kwargs): kwargs: overwrite the meta data, for example get_meta_data( uid = uid, sample = 'test') --> will overwrtie the meta's sample to test return: - meta data of the uid: a dictionay + meta data of the uid: a dictionary with keys: detector suid: the simple given uid @@ -3044,8 +3265,12 @@ def get_meta_data(uid, default_dec="eiger", *argv, **kwargs): md.update(header.start.items()) # print(header.start.time) - md["start_time"] = time.strftime("%Y-%m-%d %H:%M:%S", 
time.localtime(header.start["time"])) - md["stop_time"] = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(header.stop["time"])) + md["start_time"] = time.strftime( + "%Y-%m-%d %H:%M:%S", time.localtime(header.start["time"]) + ) + md["stop_time"] = time.strftime( + "%Y-%m-%d %H:%M:%S", time.localtime(header.stop["time"]) + ) try: # added: try to handle runs that don't contain image data if "primary" in header.v2: descriptor = header.v2["primary"].descriptors[0] @@ -3094,11 +3319,18 @@ def get_max_countc(FD, labeled_array): if labeled_array.shape != (FD.md["ncols"], FD.md["nrows"]): raise ValueError( " `image` shape (%d, %d) in FD is not equal to the labeled_array shape (%d, %d)" - % (FD.md["ncols"], FD.md["nrows"], labeled_array.shape[0], labeled_array.shape[1]) + % ( + FD.md["ncols"], + FD.md["nrows"], + labeled_array.shape[0], + labeled_array.shape[1], + ) ) max_inten = 0 - for i in tqdm(range(FD.beg, FD.end, 1), desc="Get max intensity of ROIs in all frames"): + for i in tqdm( + range(FD.beg, FD.end, 1), desc="Get max intensity of ROIs in all frames" + ): try: (p, v) = FD.rdrawframe(i) w = np.where(timg[p])[0] @@ -3121,7 +3353,7 @@ def create_polygon_mask(image, xcorners, ycorners): """ - from skimage.draw import disk, line, line_aa, polygon + from skimage.draw import polygon imy, imx = image.shape bst_mask = np.zeros_like(image, dtype=bool) @@ -3144,7 +3376,7 @@ def create_rectangle_mask(image, xcorners, ycorners): """ - from skimage.draw import disk, line, line_aa, polygon + from skimage.draw import polygon imy, imx = image.shape bst_mask = np.zeros_like(image, dtype=bool) @@ -3154,7 +3386,9 @@ def create_rectangle_mask(image, xcorners, ycorners): return bst_mask -def create_multi_rotated_rectangle_mask(image, center=None, length=100, width=50, angles=[0]): +def create_multi_rotated_rectangle_mask( + image, center=None, length=100, width=50, angles=[0] +): """Developed at July 10, 2017 by Y.G.@CHX, NSLS2 Create multi rectangle-shaped mask by rotating a 
rectangle with a list of angles The original rectangle is defined by four corners, i.e., @@ -3183,13 +3417,22 @@ def create_multi_rotated_rectangle_mask(image, center=None, length=100, width=50 mask = np.zeros(image.shape, dtype=bool) wy = length wx = width - x = np.array([max(0, cx - wx // 2), min(imx, cx + wx // 2), min(imx, cx + wx // 2), max(0, cx - wx // 2)]) + x = np.array( + [ + max(0, cx - wx // 2), + min(imx, cx + wx // 2), + min(imx, cx + wx // 2), + max(0, cx - wx // 2), + ] + ) y = np.array([cy, cy, min(imy, cy + wy), min(imy, cy + wy)]) rr, cc = polygon(y, x, shape=image.shape) mask[rr, cc] = 1 mask_rot = np.zeros(image.shape, dtype=bool) for angle in angles: - mask_rot += np.array(rotate(mask, angle, center=center), dtype=bool) # , preserve_range=True) + mask_rot += np.array( + rotate(mask, angle, center=center), dtype=bool + ) # , preserve_range=True) return ~mask_rot @@ -3199,7 +3442,7 @@ def create_wedge(image, center, radius, wcors, acute_angle=True): wcors: [ [x1,x2,x3...], [y1,y2,y3..] 
""" - from skimage.draw import disk, line, line_aa, polygon + from skimage.draw import disk, polygon imy, imx = image.shape cy, cx = center @@ -3223,12 +3466,19 @@ def create_wedge(image, center, radius, wcors, acute_angle=True): def create_cross_mask( - image, center, wy_left=4, wy_right=4, wx_up=4, wx_down=4, center_disk=True, center_radius=10 + image, + center, + wy_left=4, + wy_right=4, + wx_up=4, + wx_down=4, + center_disk=True, + center_radius=10, ): """ Give image and the beam center to create a cross-shaped mask wy_left: the width of left h-line - wy_right: the width of rigth h-line + wy_right: the width of right h-line wx_up: the width of up v-line wx_down: the width of down v-line center_disk: if True, create a disk with center and center_radius @@ -3236,7 +3486,7 @@ def create_cross_mask( Return: the cross mask """ - from skimage.draw import disk, line, line_aa, polygon + from skimage.draw import disk, polygon imy, imx = image.shape cx, cy = center @@ -3292,7 +3542,10 @@ def generate_edge(centers, width): def export_scan_scalar( - uid, x="dcm_b", y=["xray_eye1_stats1_total"], path="/XF11ID/analysis/2016_3/commissioning/Results/" + uid, + x="dcm_b", + y=["xray_eye1_stats1_total"], + path="/XF11ID/analysis/2016_3/commissioning/Results/", ): """YG. 
10/17/2016 export uid data to a txt file @@ -3397,14 +3650,21 @@ def get_sid_filenames(hdr, verbose=False): ) # looking for (eiger) datafile at the path specified in metadata if len(ret[2]) == 0: if verbose: - print('could not find detector filename from "data_path" in metadata: %s' % start_doc["data path"]) + print( + 'could not find detector filename from "data_path" in metadata: %s' + % start_doc["data path"] + ) else: if verbose: print('Found detector filename from "data_path" in metadata!') success = True - if not success: # looking at path in metadata, but taking the date from the run start document - data_path = start_doc["data path"][:-11] + strftime("%Y/%m/%d/", localtime(start_doc["time"])) + if ( + not success + ): # looking at path in metadata, but taking the date from the run start document + data_path = start_doc["data path"][:-11] + strftime( + "%Y/%m/%d/", localtime(start_doc["time"]) + ) ret = ( start_doc["scan_id"], start_doc["uid"], @@ -3418,10 +3678,10 @@ def get_sid_filenames(hdr, verbose=False): print("Found detector filename in %s" % data_path) success = True - if ( - not success - ): # looking at path in metadata, but taking the date from the run stop document (in case the date rolled over between creating the start doc and staging the detector) - data_path = start_doc["data path"][:-11] + strftime("%Y/%m/%d/", localtime(stop_doc["time"])) + if not success: # looking at path in metadata, but taking the date from the run stop document (in case the date rolled over between creating the start doc and staging the detector) + data_path = start_doc["data path"][:-11] + strftime( + "%Y/%m/%d/", localtime(stop_doc["time"]) + ) ret = ( start_doc["scan_id"], start_doc["uid"], @@ -3439,7 +3699,7 @@ def get_sid_filenames(hdr, verbose=False): # def get_sid_filenames(header): # """YG. 
Dev Jan, 2016 -# Get a bluesky scan_id, unique_id, filename by giveing uid +# Get a bluesky scan_id, unique_id, filename by giving uid # Parameters # ---------- @@ -3449,9 +3709,9 @@ def get_sid_filenames(hdr, verbose=False): # ------- # scan_id: integer # unique_id: string, a full string of a uid -# filename: sring +# filename: string -# Usuage: +# Usage: # sid,uid, filenames = get_sid_filenames(db[uid]) # """ @@ -3513,7 +3773,11 @@ def load_dask_data(uid, detector, mask_path_full, reverse=False, rot90=False): img_md = {} for k in list(img_md_dict.keys()): img_md[k] = hdr.config_data(det)["primary"][0]["%s_%s" % (det, img_md_dict[k])] - if detector in ["eiger4m_single_image", "eiger1m_single_image", "eiger500K_single_image"]: + if detector in [ + "eiger4m_single_image", + "eiger1m_single_image", + "eiger500K_single_image", + ]: img_md.update({"y_pixel_size": 7.5e-05, "x_pixel_size": 7.5e-05}) got_pixel_mask = True else: @@ -3522,7 +3786,9 @@ def load_dask_data(uid, detector, mask_path_full, reverse=False, rot90=False): # load pixel mask from static location if got_pixel_mask: # json_open = open(_mask_path_ + "pixel_masks/pixel_mask_compression_%s.json" % detector.split("_")[0]) - json_open = open(mask_path_full + "pixel_mask_compression_%s.json" % detector.split("_")[0]) + json_open = open( + mask_path_full + "pixel_mask_compression_%s.json" % detector.split("_")[0] + ) mask_dict = json.load(json_open) img_md["pixel_mask"] = np.array(mask_dict["pixel_mask"]) img_md["binary_mask"] = np.array(mask_dict["binary_mask"]) @@ -3537,8 +3803,10 @@ def load_dask_data(uid, detector, mask_path_full, reverse=False, rot90=False): return dimg, img_md -def load_data(uid, detector="eiger4m_single_image", fill=True, reverse=False, rot90=False): - """load bluesky scan data by giveing uid and detector +def load_data( + uid, detector="eiger4m_single_image", fill=True, reverse=False, rot90=False +): + """load bluesky scan data by giving uid and detector Parameters ---------- @@ -3552,7 
+3820,7 @@ def load_data(uid, detector="eiger4m_single_image", fill=True, reverse=False, ro image data: a pims frames series if not success read the uid, will return image data as 0 - Usuage: + Usage: imgs = load_data( uid, detector ) md = imgs.md """ @@ -3597,7 +3865,7 @@ def load_data(uid, detector="eiger4m_single_image", fill=True, reverse=False, ro def mask_badpixels(mask, detector): """ - Mask known bad pixel from the giveing mask + Mask known bad pixel from the giving mask """ if detector == "eiger1m_single_image": @@ -3622,7 +3890,7 @@ def mask_badpixels(mask, detector): def load_data2(uid, detector="eiger4m_single_image"): - """load bluesky scan data by giveing uid and detector + """load bluesky scan data by giving uid and detector Parameters ---------- @@ -3634,7 +3902,7 @@ def load_data2(uid, detector="eiger4m_single_image"): image data: a pims frames series if not success read the uid, will return image data as 0 - Usuage: + Usage: imgs = load_data( uid, detector ) md = imgs.md """ @@ -3650,7 +3918,7 @@ def load_data2(uid, detector="eiger4m_single_image"): if flag: print("Can't Load Data!") - uid = "00000" # in case of failling load data + uid = "00000" # in case of failing load data imgs = 0 else: imgs = ev["data"][detector] @@ -3684,7 +3952,9 @@ def pload_obj(filename): return pickle.load(f) -def load_mask(path, mask_name, plot_=False, reverse=False, rot90=False, *argv, **kwargs): +def load_mask( + path, mask_name, plot_=False, reverse=False, rot90=False, *argv, **kwargs +): """load a mask file the mask is a numpy binary file (.npy) @@ -3692,14 +3962,14 @@ def load_mask(path, mask_name, plot_=False, reverse=False, rot90=False, *argv, * ---------- path: the path of the mask file mask_name: the name of the mask file - plot_: a boolen type + plot_: a boolean type reverse: if True, reverse the image upside down to match the "real" image geometry (should always be True in the future) Returns ------- mask: array if plot_ =True, will show the mask - Usuage: + 
Usage: mask = load_mask( path, mask_name, plot_ = True ) """ @@ -3714,7 +3984,9 @@ def load_mask(path, mask_name, plot_=False, reverse=False, rot90=False, *argv, * return mask -def create_hot_pixel_mask(img, threshold, center=None, center_radius=300, outer_radius=0): +def create_hot_pixel_mask( + img, threshold, center=None, center_radius=300, outer_radius=0 +): """create a hot pixel mask by giving threshold Input: img: the image to create hot pixel mask @@ -3867,7 +4139,13 @@ def show_img( ) # vmin=0,vmax=1, else: im = ax.imshow( - image, origin=origin, cmap=cmap, interpolation=interpolation, vmin=vmin, vmax=vmax, extent=extent + image, + origin=origin, + cmap=cmap, + interpolation=interpolation, + vmin=vmin, + vmax=vmax, + extent=extent, ) # vmin=0,vmax=1, else: if not use_mat_imshow: @@ -3890,7 +4168,9 @@ def show_img( extent=extent, ) if label_array != None: - im2 = show_label_array(ax, label_array, alpha=alpha, cmap=cmap, interpolation=interpolation) + im2 = show_label_array( + ax, label_array, alpha=alpha, cmap=cmap, interpolation=interpolation + ) ax.set_title(image_name) if xlim != None: @@ -3902,7 +4182,6 @@ def show_img( ax.set_yticks([]) ax.set_xticks([]) else: - ax.tick_params(axis="both", which="major", labelsize=tick_size) ax.tick_params(axis="both", which="minor", labelsize=tick_size) # mpl.rcParams['xtick.labelsize'] = tick_size @@ -3922,13 +4201,21 @@ def show_img( ax.set_aspect(aspect="auto") if show_colorbar: - cbar = fig.colorbar(im, extend="neither", spacing="proportional", orientation="vertical") + cbar = fig.colorbar( + im, extend="neither", spacing="proportional", orientation="vertical" + ) cbar.ax.tick_params(labelsize=colorbar_fontsize) fig.set_tight_layout(tight) if save: if show_time: dt = datetime.now() - CurTime = "_%s%02d%02d-%02d%02d-" % (dt.year, dt.month, dt.day, dt.hour, dt.minute) + CurTime = "_%s%02d%02d-%02d%02d-" % ( + dt.year, + dt.month, + dt.day, + dt.hour, + dt.minute, + ) fp = path + "%s" % (file_name) + CurTime + "." 
+ save_format else: fp = path + "%s" % (image_name) + "." + save_format @@ -3963,7 +4250,7 @@ def plot1D( ---------- y: column-y x: column-x, by default x=None, the plot will use index of y as x-axis - the other paramaters are defined same as plt.plot + the other parameters are defined same as plt.plot Returns ------- None @@ -4079,7 +4366,9 @@ def plot1D( ### -def check_shutter_open(data_series, min_inten=0, time_edge=[0, 10], plot_=False, *argv, **kwargs): +def check_shutter_open( + data_series, min_inten=0, time_edge=[0, 10], plot_=False, *argv, **kwargs +): """Check the first frame with shutter open Parameters @@ -4091,11 +4380,13 @@ def check_shutter_open(data_series, min_inten=0, time_edge=[0, 10], plot_=False, return: shutter_open_frame: a integer, the first frame number with open shutter - Usuage: + Usage: good_start = check_shutter_open( imgsa, min_inten=5, time_edge = [0,20], plot_ = False ) """ - imgsum = np.array([np.sum(img) for img in data_series[time_edge[0] : time_edge[1] : 1]]) + imgsum = np.array( + [np.sum(img) for img in data_series[time_edge[0] : time_edge[1] : 1]] + ) if plot_: fig, ax = plt.subplots() ax.plot(imgsum, "bo") @@ -4109,18 +4400,26 @@ def check_shutter_open(data_series, min_inten=0, time_edge=[0, 10], plot_=False, def get_each_frame_intensity( - data_series, sampling=50, bad_pixel_threshold=1e10, plot_=False, save=False, *argv, **kwargs + data_series, + sampling=50, + bad_pixel_threshold=1e10, + plot_=False, + save=False, + *argv, + **kwargs, ): """Get the total intensity of each frame by sampling every N frames Also get bad_frame_list by check whether above bad_pixel_threshold - Usuage: + Usage: imgsum, bad_frame_list = get_each_frame_intensity(good_series ,sampling = 1000, bad_pixel_threshold=1e10, plot_ = True) """ # print ( argv, kwargs ) - imgsum = np.array([np.sum(img) for img in tqdm(data_series[::sampling], leave=True)]) + imgsum = np.array( + [np.sum(img) for img in tqdm(data_series[::sampling], leave=True)] + ) if plot_: 
uid = "uid" if "uid" in kwargs.keys(): @@ -4184,7 +4483,9 @@ def create_time_slice(N, slice_num, slice_width, edges=None): return np.array(time_edge) -def show_label_array(ax, label_array, cmap=None, aspect=None, interpolation="nearest", **kwargs): +def show_label_array( + ax, label_array, cmap=None, aspect=None, interpolation="nearest", **kwargs +): """ YG. Sep 26, 2017 Modified show_label_array(ax, label_array, cmap=None, **kwargs) @@ -4212,7 +4513,9 @@ def show_label_array(ax, label_array, cmap=None, aspect=None, interpolation="nea _cmap = copy.copy((mcm.get_cmap(cmap))) _cmap.set_under("w", 0) vmin = max(0.5, kwargs.pop("vmin", 0.5)) - im = ax.imshow(label_array, cmap=cmap, interpolation=interpolation, vmin=vmin, **kwargs) + im = ax.imshow( + label_array, cmap=cmap, interpolation=interpolation, vmin=vmin, **kwargs + ) if aspect == None: ax.set_aspect(aspect="auto") # ax.set_aspect('equal') @@ -4264,10 +4567,21 @@ def show_label_array_on_image( # print (vmin, vmax ) if log_img: im = ax.imshow( - image, cmap=imshow_cmap, interpolation="none", norm=LogNorm(vmin, vmax), **kwargs + image, + cmap=imshow_cmap, + interpolation="none", + norm=LogNorm(vmin, vmax), + **kwargs, ) # norm=norm, else: - im = ax.imshow(image, cmap=imshow_cmap, interpolation="none", vmin=vmin, vmax=vmax, **kwargs) # norm=norm, + im = ax.imshow( + image, + cmap=imshow_cmap, + interpolation="none", + vmin=vmin, + vmax=vmax, + **kwargs, + ) # norm=norm, im_label = mpl_plot.show_label_array( ax, label_array, cmap=cmap, vmin=vmin, vmax=vmax, alpha=alpha, **kwargs @@ -4375,10 +4689,42 @@ def show_ROI_on_image( # print (xval, y) axes.text(x_val, y_val, c, color="b", va="center", ha="center") if show_ang_cor: - axes.text(-0.0, 0.5, "-/+180" + r"$^0$", color="r", va="center", ha="center", transform=axes.transAxes) - axes.text(1.0, 0.5, "0" + r"$^0$", color="r", va="center", ha="center", transform=axes.transAxes) - axes.text(0.5, -0.0, "-90" + r"$^0$", color="r", va="center", ha="center", 
transform=axes.transAxes) - axes.text(0.5, 1.0, "90" + r"$^0$", color="r", va="center", ha="center", transform=axes.transAxes) + axes.text( + -0.0, + 0.5, + "-/+180" + r"$^0$", + color="r", + va="center", + ha="center", + transform=axes.transAxes, + ) + axes.text( + 1.0, + 0.5, + "0" + r"$^0$", + color="r", + va="center", + ha="center", + transform=axes.transAxes, + ) + axes.text( + 0.5, + -0.0, + "-90" + r"$^0$", + color="r", + va="center", + ha="center", + transform=axes.transAxes, + ) + axes.text( + 0.5, + 1.0, + "90" + r"$^0$", + color="r", + va="center", + ha="center", + transform=axes.transAxes, + ) axes.set_aspect(aspect) # fig.colorbar(im_label) @@ -4395,7 +4741,7 @@ def show_ROI_on_image( def crop_image(image, crop_mask): """Crop the non_zeros pixels of an image to a new image""" - from skimage.util import crop, pad + from skimage.util import crop pxlst = np.where(crop_mask.ravel())[0] dims = crop_mask.shape @@ -4411,11 +4757,25 @@ def crop_image(image, crop_mask): maxpixelx = np.max(pixelx) maxpixely = np.max(pixely) crops = crop_mask * image - img_crop = crop(crops, ((minpixelx, imgwidthx - maxpixelx - 1), (minpixely, imgwidthy - maxpixely - 1))) + img_crop = crop( + crops, + ( + (minpixelx, imgwidthx - maxpixelx - 1), + (minpixely, imgwidthy - maxpixely - 1), + ), + ) return img_crop -def get_avg_img(data_series, img_samp_index=None, sampling=100, plot_=False, save=False, *argv, **kwargs): +def get_avg_img( + data_series, + img_samp_index=None, + sampling=100, + plot_=False, + save=False, + *argv, + **kwargs, +): """Get average imagef from a data_series by every sampling number to save time""" if img_samp_index == None: avg_img = np.average(data_series[::sampling], axis=0) @@ -4433,7 +4793,9 @@ def get_avg_img(data_series, img_samp_index=None, sampling=100, plot_=False, sav if "uid" in kwargs.keys(): uid = kwargs["uid"] - im = ax.imshow(avg_img, cmap="viridis", origin="lower", norm=LogNorm(vmin=0.001, vmax=1e2)) + im = ax.imshow( + avg_img, 
cmap="viridis", origin="lower", norm=LogNorm(vmin=0.001, vmax=1e2) + ) # ax.set_title("Masked Averaged Image") ax.set_title("uid= %s--Masked Averaged Image" % uid) fig.colorbar(im) @@ -4454,7 +4816,9 @@ def get_avg_img(data_series, img_samp_index=None, sampling=100, plot_=False, sav return avg_img -def check_ROI_intensity(avg_img, ring_mask, ring_number=3, save=False, plot=True, *argv, **kwargs): +def check_ROI_intensity( + avg_img, ring_mask, ring_number=3, save=False, plot=True, *argv, **kwargs +): """plot intensity versus pixel of a ring Parameters ---------- @@ -4499,7 +4863,15 @@ def check_ROI_intensity(avg_img, ring_mask, ring_number=3, save=False, plot=True # from tqdm import tqdm -def cal_g2(image_series, ring_mask, bad_image_process, bad_frame_list=None, good_start=0, num_buf=8, num_lev=None): +def cal_g2( + image_series, + ring_mask, + bad_image_process, + bad_frame_list=None, + good_start=0, + num_buf=8, + num_lev=None, +): """calculation g2 by using a multi-tau algorithm""" noframes = len(image_series) # number of frames, not "no frames" @@ -4513,20 +4885,29 @@ def cal_g2(image_series, ring_mask, bad_image_process, bad_frame_list=None, good if num_lev == None: num_lev = int(np.log(noframes / (num_buf - 1)) / np.log(2) + 1) + 1 - print("In this g2 calculation, the buf and lev number are: %s--%s--" % (num_buf, num_lev)) + print( + "In this g2 calculation, the buf and lev number are: %s--%s--" + % (num_buf, num_lev) + ) print("%s frames will be processed..." 
% (noframes)) print("Bad Frames involved!") - g2, lag_steps = corr.multi_tau_auto_corr(num_lev, num_buf, ring_mask, tqdm(new_imgs)) + g2, lag_steps = corr.multi_tau_auto_corr( + num_lev, num_buf, ring_mask, tqdm(new_imgs) + ) print("G2 calculation DONE!") else: - if num_lev == None: num_lev = int(np.log(noframes / (num_buf - 1)) / np.log(2) + 1) + 1 - print("In this g2 calculation, the buf and lev number are: %s--%s--" % (num_buf, num_lev)) + print( + "In this g2 calculation, the buf and lev number are: %s--%s--" + % (num_buf, num_lev) + ) print("%s frames will be processed..." % (noframes)) - g2, lag_steps = corr.multi_tau_auto_corr(num_lev, num_buf, ring_mask, tqdm(image_series)) + g2, lag_steps = corr.multi_tau_auto_corr( + num_lev, num_buf, ring_mask, tqdm(image_series) + ) print("G2 calculation DONE!") return g2, lag_steps @@ -4561,13 +4942,12 @@ def trans_data_to_pd(data, label=None, dtype="array"): convert data into pandas.DataFrame Input: data: list or np.array - label: the coloum label of the data + label: the column label of the data dtype: list or array [[NOT WORK or dict (for dict only save the scalar not arrays values)]] Output: a pandas.DataFrame """ # lists a [ list1, list2...] all the list have the same length - import sys import pandas as pd from numpy import arange, array @@ -4579,7 +4959,7 @@ def trans_data_to_pd(data, label=None, dtype="array"): data = array(data) N, M = data.shape else: - print("Wrong data type! Now only support 'list' and 'array' tpye") + print("Wrong data type! 
Now only support 'list' and 'array' type") index = arange(N) if label == None: @@ -4589,7 +4969,9 @@ def trans_data_to_pd(data, label=None, dtype="array"): return df -def save_lists(data, label=None, filename=None, path=None, return_res=False, verbose=False): +def save_lists( + data, label=None, filename=None, path=None, return_res=False, verbose=False +): """ save_lists( data, label=None, filename=None, path=None) @@ -4650,7 +5032,15 @@ def get_pos_val_overlap(p1, v1, p2, v2, Nl): return v1[w1], v2[w2] -def save_arrays(data, label=None, dtype="array", filename=None, path=None, return_res=False, verbose=False): +def save_arrays( + data, + label=None, + dtype="array", + filename=None, + path=None, + return_res=False, + verbose=False, +): """ July 10, 2016, Y.G.@CHX save_arrays( data, label=None, dtype='array', filename=None, path=None): @@ -4685,13 +5075,13 @@ def save_arrays(data, label=None, dtype="array", filename=None, path=None, retur def cal_particle_g2(radius, viscosity, qr, taus, beta=0.2, T=298): """YG Dev Nov 20, 2017@CHX - calculate particle g2 fucntion by giving particle radius, Q , and solution viscosity using a simple + calculate particle g2 function by giving particle radius, Q , and solution viscosity using a simple exponetional model Input: radius: m qr, list, in A-1 visocity: N*s/m^2 (water at 25K = 8.9*10^(-4) ) - T: temperture, in K + T: temperature, in K e.g., for a 250 nm sphere in glycerol/water (90:10) at RT (298K) gives: 1.38064852*10**(-123)*298 / ( 6*np.pi * 0.20871 * 250 *10**(-9)) * 10**20 /1e5 = 4.18*10**5 A2/s taus: time @@ -4704,7 +5094,9 @@ def cal_particle_g2(radius, viscosity, qr, taus, beta=0.2, T=298): g2_q1 = np.zeros(len(qr), dtype=object) for i, q1 in enumerate(qr): relaxation_rate = D0 * q1**2 - g2_q1[i] = simple_exponential(taus, beta=beta, relaxation_rate=relaxation_rate, baseline=1) + g2_q1[i] = simple_exponential( + taus, beta=beta, relaxation_rate=relaxation_rate, baseline=1 + ) return g2_q1 @@ -4837,7 +5229,7 @@ def 
ring_edges(inner_radius, width, spacing=0, num_rings=None): # spacing_is_list = isinstance(spacing, collections.Iterable) if width_is_list and spacing_is_list: if len(width) != len(spacing) + 1: - raise ValueError("List of spacings must be one less than list " "of widths.") + raise ValueError("List of spacings must be one less than list of widths.") if num_rings == None: try: num_rings = len(width) @@ -4857,7 +5249,7 @@ def ring_edges(inner_radius, width, spacing=0, num_rings=None): if spacing_is_list: if num_rings - 1 != len(spacing): raise ValueError("num_rings does not match spacing list") - # Now regularlize the input. + # Now regularize the input. if not width_is_list: width = np.ones(num_rings) * width @@ -4920,7 +5312,10 @@ def get_non_uniform_edges( width = np.ones_like(centers) * width for i, c in enumerate(centers): edges[i * number_rings : (i + 1) * number_rings, :] = ring_edges( - inner_radius=c - width[i] * number_rings / 2, width=width[i], spacing=spacing, num_rings=number_rings + inner_radius=c - width[i] * number_rings / 2, + width=width[i], + spacing=spacing, + num_rings=number_rings, ) return edges @@ -4932,7 +5327,6 @@ def trans_tf_to_td(tf, dtype="dframe"): from datetime import datetime import numpy as np - import pandas as pd """translate time.float to time.date, td.type dframe: a dataframe @@ -5043,7 +5437,9 @@ def get_averaged_data_from_multi_res( if D != 3: keystr_average[sk[i] : sk[i + 1]] /= avg_count[sk[i + 1]] else: - keystr_average[sk[i] : sk[i + 1], sk[i] : sk[i + 1], :] /= avg_count[sk[i + 1]] + keystr_average[sk[i] : sk[i + 1], sk[i] : sk[i + 1], :] /= avg_count[ + sk[i + 1] + ] return keystr_average @@ -5082,7 +5478,9 @@ def save_g2_general(g2, taus, qr=None, qz=None, uid="uid", path=None, return_res # filename += '-uid=%s.csv' % (uid) filename1 = os.path.join(path, filename) df.to_csv(filename1) - print("The correlation function is saved in %s with filename as %s" % (path, filename)) + print( + "The correlation function is saved in 
%s with filename as %s" % (path, filename) + ) if return_res: return df @@ -5101,17 +5499,35 @@ def simple_exponential(x, beta, relaxation_rate, baseline=1): def simple_exponential_with_vibration(x, beta, relaxation_rate, freq, amp, baseline=1): - return beta * (1 + amp * np.cos(2 * np.pi * freq * x)) * np.exp(-2 * relaxation_rate * x) + baseline + return ( + beta + * (1 + amp * np.cos(2 * np.pi * freq * x)) + * np.exp(-2 * relaxation_rate * x) + + baseline + ) -def stretched_auto_corr_scat_factor_with_vibration(x, beta, relaxation_rate, alpha, freq, amp, baseline=1): - return beta * (1 + amp * np.cos(2 * np.pi * freq * x)) * np.exp(-2 * (relaxation_rate * x) ** alpha) + baseline +def stretched_auto_corr_scat_factor_with_vibration( + x, beta, relaxation_rate, alpha, freq, amp, baseline=1 +): + return ( + beta + * (1 + amp * np.cos(2 * np.pi * freq * x)) + * np.exp(-2 * (relaxation_rate * x) ** alpha) + + baseline + ) -def flow_para_function_with_vibration(x, beta, relaxation_rate, flow_velocity, freq, amp, baseline=1): +def flow_para_function_with_vibration( + x, beta, relaxation_rate, flow_velocity, freq, amp, baseline=1 +): vibration_part = 1 + amp * np.cos(2 * np.pi * freq * x) Diff_part = np.exp(-2 * relaxation_rate * x) - Flow_part = np.pi**2 / (16 * x * flow_velocity) * abs(erf(np.sqrt(4 / np.pi * 1j * x * flow_velocity))) ** 2 + Flow_part = ( + np.pi**2 + / (16 * x * flow_velocity) + * abs(erf(np.sqrt(4 / np.pi * 1j * x * flow_velocity))) ** 2 + ) return beta * vibration_part * Diff_part * Flow_part + baseline @@ -5119,11 +5535,17 @@ def flow_para_function(x, beta, relaxation_rate, flow_velocity, baseline=1): """flow_velocity: q.v (q vector dot v vector = q*v*cos(angle) )""" Diff_part = np.exp(-2 * relaxation_rate * x) - Flow_part = np.pi**2 / (16 * x * flow_velocity) * abs(erf(np.sqrt(4 / np.pi * 1j * x * flow_velocity))) ** 2 + Flow_part = ( + np.pi**2 + / (16 * x * flow_velocity) + * abs(erf(np.sqrt(4 / np.pi * 1j * x * flow_velocity))) ** 2 + ) return 
beta * Diff_part * Flow_part + baseline -def flow_para_function_explicitq(x, beta, diffusion, flow_velocity, alpha=1, baseline=1, qr=1, q_ang=0): +def flow_para_function_explicitq( + x, beta, diffusion, flow_velocity, alpha=1, baseline=1, qr=1, q_ang=0 +): """Nov 9, 2017 Basically, make q vector to (qr, angle), ###relaxation_rate is actually a diffusion rate flow_velocity: q.v (q vector dot v vector = q*v*cos(angle) ) @@ -5138,7 +5560,14 @@ def flow_para_function_explicitq(x, beta, diffusion, flow_velocity, alpha=1, bas Flow_part = ( np.pi**2 / (16 * x * flow_velocity * qr * abs(np.cos(q_ang))) - * abs(erf(np.sqrt(4 / np.pi * 1j * x * flow_velocity * qr * abs(np.cos(q_ang))))) ** 2 + * abs( + erf( + np.sqrt( + 4 / np.pi * 1j * x * flow_velocity * qr * abs(np.cos(q_ang)) + ) + ) + ) + ** 2 ) else: Flow_part = 1 @@ -5152,32 +5581,53 @@ def get_flow_velocity(average_velocity, shape_factor): return average_velocity * (1 - shape_factor) / (1 + shape_factor) -def stretched_flow_para_function(x, beta, relaxation_rate, alpha, flow_velocity, baseline=1): +def stretched_flow_para_function( + x, beta, relaxation_rate, alpha, flow_velocity, baseline=1 +): """ flow_velocity: q.v (q vector dot v vector = q*v*cos(angle) ) """ Diff_part = np.exp(-2 * (relaxation_rate * x) ** alpha) - Flow_part = np.pi**2 / (16 * x * flow_velocity) * abs(erf(np.sqrt(4 / np.pi * 1j * x * flow_velocity))) ** 2 + Flow_part = ( + np.pi**2 + / (16 * x * flow_velocity) + * abs(erf(np.sqrt(4 / np.pi * 1j * x * flow_velocity))) ** 2 + ) return beta * Diff_part * Flow_part + baseline def get_g2_fit_general_two_steps( - g2, taus, function="simple_exponential", second_fit_range=[0, 20], sequential_fit=False, *argv, **kwargs + g2, + taus, + function="simple_exponential", + second_fit_range=[0, 20], + sequential_fit=False, + *argv, + **kwargs, ): """ Fit g2 in two steps, i) Using the "function" to fit whole g2 to get baseline and beta (contrast) ii) Then using the obtained baseline and beta to fit g2 in a 
"second_fit_range" by using simple_exponential function """ - g2_fit_result, taus_fit, g2_fit = get_g2_fit_general(g2, taus, function, sequential_fit, *argv, **kwargs) + g2_fit_result, taus_fit, g2_fit = get_g2_fit_general( + g2, taus, function, sequential_fit, *argv, **kwargs + ) guess_values = {} for k in list(g2_fit_result[0].params.keys()): - guess_values[k] = np.array([g2_fit_result[i].params[k].value for i in range(g2.shape[1])]) + guess_values[k] = np.array( + [g2_fit_result[i].params[k].value for i in range(g2.shape[1])] + ) if "guess_limits" in kwargs: guess_limits = kwargs["guess_limits"] else: - guess_limits = dict(baseline=[1, 1.8], alpha=[0, 2], beta=[0.0, 1], relaxation_rate=[0.001, 10000]) + guess_limits = dict( + baseline=[1, 1.8], + alpha=[0, 2], + beta=[0.0, 1], + relaxation_rate=[0.001, 10000], + ) g2_fit_result, taus_fit, g2_fit = get_g2_fit_general( g2, @@ -5185,7 +5635,12 @@ def get_g2_fit_general_two_steps( function="simple_exponential", sequential_fit=sequential_fit, fit_range=second_fit_range, - fit_variables={"baseline": False, "beta": False, "alpha": False, "relaxation_rate": True}, + fit_variables={ + "baseline": False, + "beta": False, + "alpha": False, + "relaxation_rate": True, + }, guess_values=guess_values, guess_limits=guess_limits, ) @@ -5194,7 +5649,14 @@ def get_g2_fit_general_two_steps( def get_g2_fit_general( - g2, taus, function="simple_exponential", sequential_fit=False, qval_dict=None, ang_init=90, *argv, **kwargs + g2, + taus, + function="simple_exponential", + sequential_fit=False, + qval_dict=None, + ang_init=90, + *argv, + **kwargs, ): """ Nov 9, 2017, give qval_dict for using function of flow_para_function_explicitq @@ -5215,9 +5677,9 @@ def get_g2_fit_general( supported function include: 'simple_exponential' (or 'simple'): fit by a simple exponential function, defined as beta * np.exp(-2 * relaxation_rate * lags) + baseline - 'streched_exponential'(or 'streched'): fit by a streched exponential function, defined as + 
'stretched_exponential'(or 'stretched'): fit by a stretched exponential function, defined as beta * ( np.exp( -2 * ( relaxation_rate * tau )**alpha ) + baseline - 'stretched_vibration': fit by a streched exponential function with vibration, defined as + 'stretched_vibration': fit by a stretched exponential function with vibration, defined as beta * (1 + amp*np.cos( 2*np.pi*60* x) )* np.exp(-2 * (relaxation_rate * x)**alpha) + baseline 'flow_para_function' (or flow): fit by a flow function @@ -5229,7 +5691,7 @@ def get_g2_fit_general( beta, relaxation_rate , alpha ,baseline values: a False or True, False for not vary 'guess_values': a dict, for initial value of the fitting para, - the defalut values are + the default values are dict( beta=.1, alpha=1.0, relaxation_rate =0.005, baseline=1.0) 'guess_limits': a dict, for the limits of the fittting para, for example: @@ -5238,7 +5700,7 @@ def get_g2_fit_general( dict( baseline =[0.5, 2.5], alpha=[0, inf] ,beta = [0, 1], relaxation_rate= [0.0,1000] ) Returns ------- - fit resutls: a instance in limfit + fit results: a instance in limfit tau_fit fit_data by the model, it has the q number of g2 @@ -5267,16 +5729,22 @@ def get_g2_fit_general( _vars = [] if function == "simple_exponential" or function == "simple": _vars = np.unique(_vars + ["alpha"]) - mod = Model(stretched_auto_corr_scat_factor) # , independent_vars= list( _vars) ) + mod = Model( + stretched_auto_corr_scat_factor + ) # , independent_vars= list( _vars) ) elif function == "stretched_exponential" or function == "stretched": mod = Model(stretched_auto_corr_scat_factor) # , independent_vars= _vars) elif function == "stretched_vibration": - mod = Model(stretched_auto_corr_scat_factor_with_vibration) # , independent_vars= _vars) + mod = Model( + stretched_auto_corr_scat_factor_with_vibration + ) # , independent_vars= _vars) elif function == "flow_para_function" or function == "flow_para": mod = Model(flow_para_function) # , independent_vars= _vars) elif function 
== "flow_para_function_explicitq" or function == "flow_para_qang": mod = Model(flow_para_function_explicitq) # , independent_vars= _vars) - elif function == "flow_para_function_with_vibration" or function == "flow_vibration": + elif ( + function == "flow_para_function_with_vibration" or function == "flow_vibration" + ): mod = Model(flow_para_function_with_vibration) else: @@ -5297,7 +5765,11 @@ def get_g2_fit_general( for k in list(guess_limits.keys()): mod.set_param_hint(k, min=guess_limits[k][0], max=guess_limits[k][1]) - if function == "flow_para_function" or function == "flow_para" or function == "flow_vibration": + if ( + function == "flow_para_function" + or function == "flow_para" + or function == "flow_vibration" + ): mod.set_param_hint("flow_velocity", min=0) if function == "flow_para_function_explicitq" or function == "flow_para_qang": mod.set_param_hint("flow_velocity", min=0) @@ -5331,7 +5803,12 @@ def get_g2_fit_general( _alpha_ = _alpha[0] else: _alpha_ = _alpha - pars = mod.make_params(beta=_beta_, alpha=_alpha_, relaxation_rate=_relaxation_rate_, baseline=_baseline_) + pars = mod.make_params( + beta=_beta_, + alpha=_alpha_, + relaxation_rate=_relaxation_rate_, + baseline=_baseline_, + ) if function == "flow_para_function" or function == "flow_para": _flow_velocity = _guess_val["flow_velocity"] @@ -5374,7 +5851,12 @@ def get_g2_fit_general( _freq = _guess_val["freq"] _amp = _guess_val["amp"] pars = mod.make_params( - beta=_beta, alpha=_alpha, freq=_freq, amp=_amp, relaxation_rate=_relaxation_rate, baseline=_baseline + beta=_beta, + alpha=_alpha, + freq=_freq, + amp=_amp, + relaxation_rate=_relaxation_rate, + baseline=_baseline, ) if function == "flow_vibration": @@ -5435,9 +5917,10 @@ def get_g2_fit_general( # pars[k].value = _guess_val[k][i] if function == "flow_para_function_explicitq" or function == "flow_para_qang": if qval_dict == None: - print("Please provide qval_dict, a dict with qr and ang (in unit of degrees).") + print( + "Please provide 
qval_dict, a dict with qr and ang (in unit of degrees)." + ) else: - pars = mod.make_params( beta=_beta_, alpha=_alpha_, @@ -5613,14 +6096,14 @@ def plot_g2_general( function: 'simple_exponential': fit by a simple exponential function, defined as beta * np.exp(-2 * relaxation_rate * lags) + baseline - 'streched_exponential': fit by a streched exponential function, defined as + 'stretched_exponential': fit by a stretched exponential function, defined as beta * (np.exp(-2 * relaxation_rate * lags))**alpha + baseline geometry: 'saxs': a saxs with Qr partition 'ang_saxs': a saxs with Qr and angular partition 'gi_saxs': gisaxs with Qz, Qr - one_plot: if True, plot all images in one pannel + one_plot: if True, plot all images in one panel kwargs: Returns @@ -5658,7 +6141,12 @@ def plot_g2_general( else: fit_res_ = None else: - g2_dict_, taus_dict_, qval_dict_, fit_res_ = g2_dict, taus_dict, qval_dict, fit_res + g2_dict_, taus_dict_, qval_dict_, fit_res_ = ( + g2_dict, + taus_dict, + qval_dict, + fit_res, + ) ( qr_label, @@ -5713,12 +6201,16 @@ def plot_g2_general( if geometry == "ang_saxs": title_short = "Angle= %.2f" % (short_ulabel[s_ind]) + r"$^\circ$" elif geometry == "gi_saxs": - title_short = r"$Q_z= $" + "%.4f" % (short_ulabel[s_ind]) + r"$\AA^{-1}$" + title_short = ( + r"$Q_z= $" + "%.4f" % (short_ulabel[s_ind]) + r"$\AA^{-1}$" + ) else: title_short = "" else: # qr if geometry == "ang_saxs" or geometry == "gi_saxs": - title_short = r"$Q_r= $" + "%.5f " % (short_ulabel[s_ind]) + r"$\AA^{-1}$" + title_short = ( + r"$Q_r= $" + "%.5f " % (short_ulabel[s_ind]) + r"$\AA^{-1}$" + ) else: title_short = "" # print(geometry) @@ -5766,35 +6258,49 @@ def plot_g2_general( # ax = fig[fig_subnum].add_subplot(sx,sy, i + 1 - fig_subnum*max_plotnum_fig) fig_subnum = i // max_plotnum_fig # print( i, sx,sy, fig_subnum, max_plotnum_fig, i + 1 - fig_subnum*max_plotnum_fig ) - ax = fig[fig_subnum].add_subplot(sx, sy, i + 1 - fig_subnum * max_plotnum_fig) + ax = 
fig[fig_subnum].add_subplot( + sx, sy, i + 1 - fig_subnum * max_plotnum_fig + ) ax.set_ylabel(r"$%s$" % ylabel + "(" + r"$\tau$" + ")") ax.set_xlabel(r"$\tau $ $(s)$", fontsize=16) if master_plot == "qz" or master_plot == "angle": if geometry != "gi_waxs": - title_long = r"$Q_r= $" + "%.5f " % (long_label[l_ind]) + r"$\AA^{-1}$" + title_long = ( + r"$Q_r= $" + "%.5f " % (long_label[l_ind]) + r"$\AA^{-1}$" + ) else: title_long = r"$Q_r= $" + "%i " % (long_label[l_ind]) # print( title_long,long_label,l_ind ) else: if geometry == "ang_saxs": # title_long = 'Ang= ' + '%.2f'%( long_label[l_ind] ) + r'$^\circ$' + '( %d )'%(l_ind) - title_long = "Ang= " + "%.2f" % (long_label[l_ind]) # + r'$^\circ$' + '( %d )'%(l_ind) + title_long = ( + "Ang= " + "%.2f" % (long_label[l_ind]) + ) # + r'$^\circ$' + '( %d )'%(l_ind) elif geometry == "gi_saxs": - title_long = r"$Q_z= $" + "%.5f " % (long_label[l_ind]) + r"$\AA^{-1}$" + title_long = ( + r"$Q_z= $" + "%.5f " % (long_label[l_ind]) + r"$\AA^{-1}$" + ) else: title_long = "" # print( master_plot ) if master_plot != "qz": ax.set_title(title_long + " (%s )" % (1 + l_ind), y=1.1, fontsize=12) else: - ax.set_title(title_long + " (%s )" % (1 + l_ind), y=1.05, fontsize=fontsize_sublabel) + ax.set_title( + title_long + " (%s )" % (1 + l_ind), + y=1.05, + fontsize=fontsize_sublabel, + ) # print( geometry ) # print( title_long ) if qth_interest != None: # it might have a bug here, todolist!!! 
lab = sorted(list(qval_dict_.keys())) # print( lab, l_ind) - ax.set_title(title_long + " (%s )" % (lab[l_ind] + 1), y=1.05, fontsize=12) + ax.set_title( + title_long + " (%s )" % (lab[l_ind] + 1), y=1.05, fontsize=12 + ) for ki, k in enumerate(list(g2_dict_.keys())): if ki == 0: c = "b" @@ -5836,22 +6342,46 @@ def plot_g2_general( else: # print('here ki ={} nlst = {}'.format( ki, nlst )) if nlst == 0: - ax.semilogx(x, y, m, color=c, markersize=6, label=g2_labels[ki]) + ax.semilogx( + x, + y, + m, + color=c, + markersize=6, + label=g2_labels[ki], + ) else: ax.semilogx(x, y, m, color=c, markersize=6) else: yerr = g2_err_dict[k][nlst][:, l_ind] if g2_labels == None: - ax.errorbar(x, y, yerr=yerr, fmt=m, color=c, markersize=6) + ax.errorbar( + x, y, yerr=yerr, fmt=m, color=c, markersize=6 + ) else: if nlst == 0: - ax.errorbar(x, y, yerr=yerr, fmt=m, color=c, markersize=6, label=g2_labels[ki]) + ax.errorbar( + x, + y, + yerr=yerr, + fmt=m, + color=c, + markersize=6, + label=g2_labels[ki], + ) else: - ax.errorbar(x, y, yerr=yerr, fmt=m, color=c, markersize=6) + ax.errorbar( + x, y, yerr=yerr, fmt=m, color=c, markersize=6 + ) ax.set_xscale("log", nonposx="clip") if nlst == 0: if l_ind == 0: - ax.legend(loc="best", fontsize=8, fancybox=True, framealpha=0.5) + ax.legend( + loc="best", + fontsize=8, + fancybox=True, + framealpha=0.5, + ) else: y = g2_dict_[k][:, l_ind] @@ -5862,7 +6392,9 @@ def plot_g2_general( if g2_labels == None: ax.semilogx(x, y, m, color=c, markersize=6) else: - ax.semilogx(x, y, m, color=c, markersize=6, label=g2_labels[ki]) + ax.semilogx( + x, y, m, color=c, markersize=6, label=g2_labels[ki] + ) else: yerr = g2_err_dict[k][:, l_ind] # print(x.shape, y.shape, yerr.shape) @@ -5870,7 +6402,15 @@ def plot_g2_general( if g2_labels == None: ax.errorbar(x, y, yerr=yerr, fmt=m, color=c, markersize=6) else: - ax.errorbar(x, y, yerr=yerr, fmt=m, color=c, markersize=6, label=g2_labels[ki]) + ax.errorbar( + x, + y, + yerr=yerr, + fmt=m, + color=c, + markersize=6, 
+ label=g2_labels[ki], + ) ax.set_xscale("log", nonposx="clip") if l_ind == 0: ax.legend(loc="best", fontsize=8, fancybox=True, framealpha=0.5) @@ -5894,17 +6434,26 @@ def plot_g2_general( elif function == "flow_vibration": rate = result1.best_values["relaxation_rate"] freq = result1.best_values["freq"] - if function == "flow_para_function" or function == "flow_para" or function == "flow_vibration": + if ( + function == "flow_para_function" + or function == "flow_para" + or function == "flow_vibration" + ): rate = result1.best_values["relaxation_rate"] flow = result1.best_values["flow_velocity"] - if function == "flow_para_function_explicitq" or function == "flow_para_qang": + if ( + function == "flow_para_function_explicitq" + or function == "flow_para_qang" + ): diff = result1.best_values["diffusion"] qrr = short_ulabel[s_ind] # print(qrr) rate = diff * qrr**2 flow = result1.best_values["flow_velocity"] if qval_dict_ == None: - print("Please provide qval_dict, a dict with qr and ang (in unit of degrees).") + print( + "Please provide qval_dict, a dict with qr and ang (in unit of degrees)." 
+ ) else: pass @@ -5927,11 +6476,19 @@ def plot_g2_general( txts = r"$\alpha$" + r"$ = %.3f$" % (alpha) dt += 0.1 # txts = r'$\beta$' + r'$ = %.3f$'%(beta[i]) + r'$ s^{-1}$' - ax.text(x=x, y=y0 - dt, s=txts, fontsize=fontsize, transform=ax.transAxes) + ax.text( + x=x, + y=y0 - dt, + s=txts, + fontsize=fontsize, + transform=ax.transAxes, + ) txts = r"$baseline$" + r"$ = %.3f$" % (baseline) dt += 0.1 - ax.text(x=x, y=y0 - dt, s=txts, fontsize=fontsize, transform=ax.transAxes) + ax.text( + x=x, y=y0 - dt, s=txts, fontsize=fontsize, transform=ax.transAxes + ) if ( function == "flow_para_function" @@ -5941,15 +6498,29 @@ def plot_g2_general( ): txts = r"$flow_v$" + r"$ = %.3f$" % (flow) dt += 0.1 - ax.text(x=x, y=y0 - dt, s=txts, fontsize=fontsize, transform=ax.transAxes) + ax.text( + x=x, + y=y0 - dt, + s=txts, + fontsize=fontsize, + transform=ax.transAxes, + ) if function == "stretched_vibration" or function == "flow_vibration": txts = r"$vibration$" + r"$ = %.1f Hz$" % (freq) dt += 0.1 - ax.text(x=x, y=y0 - dt, s=txts, fontsize=fontsize, transform=ax.transAxes) + ax.text( + x=x, + y=y0 - dt, + s=txts, + fontsize=fontsize, + transform=ax.transAxes, + ) txts = r"$\beta$" + r"$ = %.3f$" % (beta) dt += 0.1 - ax.text(x=x, y=y0 - dt, s=txts, fontsize=fontsize, transform=ax.transAxes) + ax.text( + x=x, y=y0 - dt, s=txts, fontsize=fontsize, transform=ax.transAxes + ) if "ylim" in kwargs: ax.set_ylim(kwargs["ylim"]) @@ -6008,7 +6579,9 @@ def power_func(x, D0, power=2): return D0 * x**power -def get_q_rate_fit_general(qval_dict, rate, geometry="saxs", weights=None, *argv, **kwargs): +def get_q_rate_fit_general( + qval_dict, rate, geometry="saxs", weights=None, *argv, **kwargs +): """ Dec 26,2016, Y.G.@CHX @@ -6173,7 +6746,9 @@ def plot_q_rate_fit_general( if show_text: txts = r"$D0: %.3e$" % D0 + r" $A^2$" + r"$s^{-1}$" dy = 0.1 - ax.text(x=0.15, y=0.65 - dy * i, s=txts, fontsize=14, transform=ax.transAxes) + ax.text( + x=0.15, y=0.65 - dy * i, s=txts, fontsize=14, 
transform=ax.transAxes + ) if Nqz != 1: legend = ax.legend(loc="best") @@ -6242,7 +6817,13 @@ def is_outlier(points, thresh=3.5, verbose=False): def outlier_mask( - avg_img, mask, roi_mask, outlier_threshold=7.5, maximum_outlier_fraction=0.1, verbose=False, plot=False + avg_img, + mask, + roi_mask, + outlier_threshold=7.5, + maximum_outlier_fraction=0.1, + verbose=False, + plot=False, ): """ outlier_mask(avg_img,mask,roi_mask,outlier_threshold = 7.5,maximum_outlier_fraction = .1,verbose=False,plot=False) @@ -6250,7 +6831,7 @@ def outlier_mask( mask: 2D array, same size as avg_img with pixels that are already masked roi_mask: 2D array, same size as avg_img, ROI labels 'encoded' as mask values (i.e. all pixels belonging to ROI 5 have the value 5) outlier_threshold: threshold for MAD test - maximum_outlier_fraction: maximum fraction of pixels in an ROI that can be classifed as outliers. If the detected fraction is higher, no outliers will be masked for that ROI. + maximum_outlier_fraction: maximum fraction of pixels in an ROI that can be classified as outliers. If the detected fraction is higher, no outliers will be masked for that ROI. 
verbose: 'True' enables message output plot: 'True' enables visualization of outliers returns: mask (dtype=float): 0 for pixels that have been classified as outliers, 1 else @@ -6274,7 +6855,9 @@ def outlier_mask( if verbose: print("ROI #%s\naverage ROI intensity: %s" % (rn, ave_roi_int)) try: - upper_outlier_threshold = np.nanmin((out_l * pixel[0][0])[out_l * pixel[0][0] > ave_roi_int]) + upper_outlier_threshold = np.nanmin( + (out_l * pixel[0][0])[out_l * pixel[0][0] > ave_roi_int] + ) if verbose: print("upper outlier threshold: %s" % upper_outlier_threshold) except: @@ -6296,7 +6879,10 @@ def outlier_mask( ### MAKE SURE we don't REMOVE more than x percent of the pixels in the roi outlier_fraction = np.sum(out_l) / len(pixel[0][0]) if verbose: - print("fraction of pixel values detected as outliers: %s" % np.round(outlier_fraction, 2)) + print( + "fraction of pixel values detected as outliers: %s" + % np.round(outlier_fraction, 2) + ) if outlier_fraction > maximum_outlier_fraction: if verbose: print( diff --git a/pyCHX/chx_handlers.py b/pyCHX/chx_handlers.py index 998ce9c..3ca8aa1 100644 --- a/pyCHX/chx_handlers.py +++ b/pyCHX/chx_handlers.py @@ -5,7 +5,6 @@ # handler registration and database instantiation should be done # here and only here! 
from databroker import Broker -from databroker.assets.handlers_base import HandlerBase from eiger_io.fs_handler import EigerHandler as EigerHandlerPIMS from eiger_io.fs_handler import EigerImages as EigerImagesPIMS diff --git a/pyCHX/chx_libs.py b/pyCHX/chx_libs.py index 4440215..f1120c7 100644 --- a/pyCHX/chx_libs.py +++ b/pyCHX/chx_libs.py @@ -6,58 +6,38 @@ ## Import all the required packages for Data Analysis from databroker import Broker -from databroker.assets.path_only_handlers import RawHandler # edit handlers here to switch to PIMS or dask # this does the databroker import # from chxtools.handlers import EigerHandler -from eiger_io.fs_handler import EigerHandler -from IPython.core.magics.display import Javascript -from modest_image import imshow -from skbeam.core.utils import multi_tau_lags -from skimage.draw import disk, ellipse, line, line_aa, polygon db = Broker.named("chx") -import collections -import copy -import getpass import itertools -import os -import pickle -import random -import sys -import time -import warnings -from datetime import datetime -import h5py import matplotlib as mpl -import matplotlib.cm as mcm import matplotlib.pyplot as plt import numpy as np -import pims -import skbeam.core.correlation as corr -import skbeam.core.roi as roi -import skbeam.core.utils as utils # * scikit-beam - data analysis tools for X-ray science # - https://github.com/scikit-beam/scikit-beam # * xray-vision - plotting helper functions for X-ray science # - https://github.com/Nikea/xray-vision -import xray_vision -import xray_vision.mpl_plotting as mpl_plot -from lmfit import Model, Parameter, Parameters, minimize, report_fit -from matplotlib import gridspec -from matplotlib.colors import LogNorm -from matplotlib.figure import Figure -from mpl_toolkits.axes_grid1 import make_axes_locatable -from pandas import DataFrame -from PIL import Image -from tqdm import tqdm -from xray_vision.mask.manual_mask import ManualMask -from xray_vision.mpl_plotting import speckle 
-mcolors = itertools.cycle(["b", "g", "r", "c", "m", "y", "k", "darkgoldenrod", "oldlace", "brown", "dodgerblue"]) +mcolors = itertools.cycle( + [ + "b", + "g", + "r", + "c", + "m", + "y", + "k", + "darkgoldenrod", + "oldlace", + "brown", + "dodgerblue", + ] +) markers = itertools.cycle(list(plt.Line2D.filled_markers)) lstyles = itertools.cycle(["-", "--", "-.", ".", ":"]) colors = itertools.cycle( @@ -389,7 +369,9 @@ [1, 0, 0], [0.5, 0.0, 0.0], ] -cmap_jet_extended = mpl.colors.LinearSegmentedColormap.from_list("cmap_jet_extended", color_list_jet_extended) +cmap_jet_extended = mpl.colors.LinearSegmentedColormap.from_list( + "cmap_jet_extended", color_list_jet_extended +) # Tweaked version of "view.gtk" default color scale color_list_vge = [ @@ -412,9 +394,11 @@ [254.0 / 255.0, 254.0 / 255.0, 0.0 / 255.0], [254.0 / 255.0, 254.0 / 255.0, 254.0 / 255.0], ] -cmap_vge_hdr = mpl.colors.LinearSegmentedColormap.from_list("cmap_vge_hdr", color_list_vge_hdr) +cmap_vge_hdr = mpl.colors.LinearSegmentedColormap.from_list( + "cmap_vge_hdr", color_list_vge_hdr +) -# Simliar to Dectris ALBULA default color-scale +# Similar to Dectris ALBULA default color-scale color_list_hdr_albula = [ [255.0 / 255.0, 255.0 / 255.0, 255.0 / 255.0], [0.0 / 255.0, 0.0 / 255.0, 0.0 / 255.0], @@ -422,9 +406,13 @@ [255.0 / 255.0, 255.0 / 255.0, 0.0 / 255.0], # [ 255.0/255.0, 255.0/255.0, 255.0/255.0], ] -cmap_hdr_albula = mpl.colors.LinearSegmentedColormap.from_list("cmap_hdr_albula", color_list_hdr_albula) +cmap_hdr_albula = mpl.colors.LinearSegmentedColormap.from_list( + "cmap_hdr_albula", color_list_hdr_albula +) cmap_albula = cmap_hdr_albula -cmap_albula_r = mpl.colors.LinearSegmentedColormap.from_list("cmap_hdr_r", color_list_hdr_albula[::-1]) +cmap_albula_r = mpl.colors.LinearSegmentedColormap.from_list( + "cmap_hdr_r", color_list_hdr_albula[::-1] +) # Ugly color-scale, but good for highlighting many features in HDR data color_list_cur_hdr_goldish = [ @@ -438,4 +426,6 @@ [200.0 / 255.0, 0.0 / 
255.0, 0.0 / 255.0], # red [255.0 / 255.0, 255.0 / 255.0, 255.0 / 255.0], # white ] -cmap_hdr_goldish = mpl.colors.LinearSegmentedColormap.from_list("cmap_hdr_goldish", color_list_cur_hdr_goldish) +cmap_hdr_goldish = mpl.colors.LinearSegmentedColormap.from_list( + "cmap_hdr_goldish", color_list_cur_hdr_goldish +) diff --git a/pyCHX/chx_olog.py b/pyCHX/chx_olog.py index 880c9f4..8e39c47 100644 --- a/pyCHX/chx_olog.py +++ b/pyCHX/chx_olog.py @@ -110,12 +110,15 @@ def update_olog_id(logid, text, attachments, verbose=True): ) client.updateLog(logid, upd) if verbose: - print(f"The url={url} was successfully updated with {text} and with " f"the attachments") + print( + f"The url={url} was successfully updated with {text} and with " + f"the attachments" + ) def update_olog_uid(uid, text, attachments): """ - Update olog book logid entry cotaining uid string with text and attachments + Update olog book logid entry containing uid string with text and attachments files. Parameters diff --git a/pyCHX/chx_outlier_detection.py b/pyCHX/chx_outlier_detection.py index 596393e..313683a 100644 --- a/pyCHX/chx_outlier_detection.py +++ b/pyCHX/chx_outlier_detection.py @@ -15,7 +15,13 @@ def is_outlier(points, thresh=3.5, verbose=False): def outlier_mask( - avg_img, mask, roi_mask, outlier_threshold=7.5, maximum_outlier_fraction=0.1, verbose=False, plot=False + avg_img, + mask, + roi_mask, + outlier_threshold=7.5, + maximum_outlier_fraction=0.1, + verbose=False, + plot=False, ): """ outlier_mask(avg_img,mask,roi_mask,outlier_threshold = 7.5,maximum_outlier_fraction = .1,verbose=False,plot=False) @@ -23,7 +29,7 @@ def outlier_mask( mask: 2D array, same size as avg_img with pixels that are already masked roi_mask: 2D array, same size as avg_img, ROI labels 'encoded' as mask values (i.e. all pixels belonging to ROI 5 have the value 5) outlier_threshold: threshold for MAD test - maximum_outlier_fraction: maximum fraction of pixels in an ROI that can be classifed as outliers. 
If the detected fraction is higher, no outliers will be masked for that ROI. + maximum_outlier_fraction: maximum fraction of pixels in an ROI that can be classified as outliers. If the detected fraction is higher, no outliers will be masked for that ROI. verbose: 'True' enables message output plot: 'True' enables visualization of outliers returns: mask (dtype=float): 0 for pixels that have been classified as outliers, 1 else @@ -47,7 +53,9 @@ def outlier_mask( if verbose: print("ROI #%s\naverage ROI intensity: %s" % (rn, ave_roi_int)) try: - upper_outlier_threshold = np.nanmin((out_l * pixel[0][0])[out_l * pixel[0][0] > ave_roi_int]) + upper_outlier_threshold = np.nanmin( + (out_l * pixel[0][0])[out_l * pixel[0][0] > ave_roi_int] + ) if verbose: print("upper outlier threshold: %s" % upper_outlier_threshold) except: @@ -69,7 +77,10 @@ def outlier_mask( ### MAKE SURE we don't REMOVE more than x percent of the pixels in the roi outlier_fraction = np.sum(out_l) / len(pixel[0][0]) if verbose: - print("fraction of pixel values detected as outliers: %s" % np.round(outlier_fraction, 2)) + print( + "fraction of pixel values detected as outliers: %s" + % np.round(outlier_fraction, 2) + ) if outlier_fraction > maximum_outlier_fraction: if verbose: print( diff --git a/pyCHX/chx_packages.py b/pyCHX/chx_packages.py index f7817b7..e6de659 100644 --- a/pyCHX/chx_packages.py +++ b/pyCHX/chx_packages.py @@ -1,256 +1,8 @@ -import pickle as cpk - -import historydict -from eiger_io.fs_handler import EigerImages -from skimage.draw import line, line_aa, polygon - -from pyCHX.chx_handlers import use_dask, use_pims +from pyCHX.chx_handlers import use_pims from pyCHX.chx_libs import ( - EigerHandler, - Javascript, - LogNorm, - Model, - cmap_albula, - cmap_vge, - datetime, db, - getpass, - h5py, - multi_tau_lags, - np, - os, - pims, - plt, - random, - roi, - time, - tqdm, - utils, - warnings, ) -use_pims(db) # use pims for importing eiger data, register_handler 'AD_EIGER2' and 'AD_EIGER' - 
-from pyCHX.chx_compress import ( - MultifileBNLCustom, - combine_binary_files, - compress_eigerdata, - create_compress_header, - get_eigerImage_per_file, - init_compress_eigerdata, - para_compress_eigerdata, - para_segment_compress_eigerdata, - read_compressed_eigerdata, - segment_compress_eigerdata, -) -from pyCHX.chx_compress_analysis import ( - Multifile, - cal_each_ring_mean_intensityc, - cal_waterfallc, - compress_eigerdata, - get_avg_imgc, - get_each_frame_intensityc, - get_each_ring_mean_intensityc, - get_time_edge_avg_img, - mean_intensityc, - plot_each_ring_mean_intensityc, - plot_waterfallc, - read_compressed_eigerdata, -) -from pyCHX.chx_correlationc import Get_Pixel_Arrayc, auto_two_Arrayc, cal_g2c, get_pixelist_interp_iq -from pyCHX.chx_correlationp import _one_time_process_errorp, auto_two_Arrayp, cal_g2p, cal_GPF, get_g2_from_ROI_GPF -from pyCHX.chx_crosscor import CrossCorrelator2, run_para_ccorr_sym -from pyCHX.chx_generic_functions import ( - R_2, - apply_mask, - average_array_withNan, - check_bad_uids, - check_lost_metadata, - check_ROI_intensity, - check_shutter_open, - combine_images, - copy_data, - create_cross_mask, - create_fullImg_with_box, - create_hot_pixel_mask, - create_polygon_mask, - create_rectangle_mask, - create_ring_mask, - create_seg_ring, - create_time_slice, - create_user_folder, - delete_data, - extract_data_from_file, - filter_roi_mask, - find_bad_pixels, - find_bad_pixels_FD, - find_good_xpcs_uids, - find_index, - find_uids, - fit_one_peak_curve, - get_averaged_data_from_multi_res, - get_avg_img, - get_bad_frame_list, - get_base_all_filenames, - get_cross_point, - get_current_pipeline_filename, - get_current_pipeline_fullpath, - get_curve_turning_points, - get_detector, - get_detectors, - get_each_frame_intensity, - get_echos, - get_eigerImage_per_file, - get_fit_by_two_linear, - get_fra_num_by_dose, - get_g2_fit_general, - get_image_edge, - get_image_with_roi, - get_img_from_iq, - get_last_uids, - get_mass_center_one_roi, 
- get_max_countc, - get_meta_data, - get_multi_tau_lag_steps, - get_non_uniform_edges, - get_print_uids, - get_q_rate_fit_general, - get_qval_dict, - get_qval_qwid_dict, - get_roi_mask_qval_qwid_by_shift, - get_roi_nr, - get_series_g2_taus, - get_SG_norm, - get_sid_filenames, - get_today_date, - get_touched_qwidth, - get_waxs_beam_center, - lin2log_g2, - linear_fit, - load_dask_data, - load_data, - load_mask, - load_pilatus, - ls_dir, - mask_badpixels, - mask_exclude_badpixel, - move_beamstop, - pad_length, - pload_obj, - plot1D, - plot_fit_two_linear_fit, - plot_g2_general, - plot_q_g2fitpara_general, - plot_q_rate_fit_general, - plot_q_rate_general, - plot_xy_with_fit, - plot_xy_x2, - print_dict, - psave_obj, - read_dict_csv, - refine_roi_mask, - reverse_updown, - ring_edges, - run_time, - save_array_to_tiff, - save_arrays, - save_current_pipeline, - save_dict_csv, - save_g2_fit_para_tocsv, - save_g2_general, - save_lists, - save_oavs_tifs, - sgolay2d, - shift_mask, - show_img, - show_ROI_on_image, - shrink_image, - trans_data_to_pd, - update_qval_dict, - update_roi_mask, - validate_uid, -) -from pyCHX.chx_olog import Attachment, LogEntry, update_olog_id, update_olog_uid, update_olog_uid_with_file -from pyCHX.chx_specklecp import ( - get_binned_his_std, - get_contrast, - get_his_std_from_pds, - get_xsvs_fit, - plot_g2_contrast, - plot_xsvs_fit, - save_bin_his_std, - save_KM, - xsvsc, - xsvsp, -) -from pyCHX.Create_Report import ( - create_multi_pdf_reports_for_uids, - create_one_pdf_reports_for_uids, - create_pdf_report, - export_xpcs_results_to_h5, - extract_xpcs_results_from_h5, - make_pdf_report, -) -from pyCHX.DataGonio import qphiavg -from pyCHX.SAXS import ( - fit_form_factor, - fit_form_factor2, - form_factor_residuals_bg_lmfit, - form_factor_residuals_lmfit, - get_form_factor_fit_lmfit, - poly_sphere_form_factor_intensity, - show_saxs_qmap, -) -from pyCHX.Two_Time_Correlation_Function import ( - get_aged_g2_from_g12, - get_aged_g2_from_g12q, - 
get_four_time_from_two_time, - get_one_time_from_two_time, - rotate_g12q_to_rectangle, - show_C12, -) -from pyCHX.XPCS_GiSAXS import ( - cal_1d_qr, - convert_gisaxs_pixel_to_q, - fit_qr_qz_rate, - get_1d_qr, - get_each_box_mean_intensity, - get_gisaxs_roi, - get_qedge, - get_qmap_label, - get_qr_tick_label, - get_qzr_map, - get_qzrmap, - get_reflected_angles, - get_t_qrc, - multi_uids_gisaxs_xpcs_analysis, - plot_gisaxs_g4, - plot_gisaxs_two_g2, - plot_qr_1d_with_ROI, - plot_qrt_pds, - plot_qzr_map, - plot_t_qrc, - show_qzr_map, - show_qzr_roi, -) -from pyCHX.XPCS_SAXS import ( - cal_g2, - combine_two_roi_mask, - create_hot_pixel_mask, - get_angular_mask, - get_circular_average, - get_cirucular_average_std, - get_each_ring_mean_intensity, - get_QrQw_From_RoiMask, - get_ring_mask, - get_seg_from_ring_mask, - get_t_iq, - get_t_iqc, - get_t_iqc_imstack, - multi_uids_saxs_xpcs_analysis, - plot_circular_average, - plot_qIq_with_ROI, - plot_t_iqc, - recover_img_from_iq, - save_lists, -) +use_pims( + db +) # use pims for importing eiger data, register_handler 'AD_EIGER2' and 'AD_EIGER' diff --git a/pyCHX/chx_speckle.py b/pyCHX/chx_speckle.py index a6eb8f3..134913b 100644 --- a/pyCHX/chx_speckle.py +++ b/pyCHX/chx_speckle.py @@ -10,7 +10,6 @@ import logging import time -import six from skbeam.core import roi from skbeam.core.utils import bin_edges_to_centers, geometric_series @@ -19,13 +18,10 @@ import sys from datetime import datetime -import matplotlib as mpl import matplotlib.pyplot as plt import numpy as np -import scipy as sp import scipy.stats as st -from matplotlib.colors import LogNorm -from scipy.optimize import leastsq, minimize +from scipy.optimize import leastsq def xsvs( @@ -84,8 +80,8 @@ def xsvs( C. Carona and A. Fluerasu , "Photon statistics and speckle visibility spectroscopy with partially coherent x-rays" J. Synchrotron Rad., vol 21, p 1288-1295, 2014. - .. [2] R. Bandyopadhyay, A. S. Gittings, S. S. Suh, P.K. Dixon and - D.J. 
Durian "Speckle-visibilty Spectroscopy: A tool to study + .. [2] R. Bandyopadhyay, A. S. Gittings, S. S. Suh, P.K. Dixon and + D.J. Durian "Speckle-visibility Spectroscopy: A tool to study time-varying dynamics" Rev. Sci. Instrum. vol 76, p 093110, 2005. There is an example in https://github.com/scikit-xray/scikit-xray-examples It will demonstrate the use of these functions in this module for @@ -266,7 +262,7 @@ def xsvs( prob_k_all[i, j] = np.array([0] * (len(bin_edges[i]) - 1)) prob_k_std_dev[i, j] = np.array([0] * (len(bin_edges[i]) - 1)) - logger.info("Processing time for XSVS took %s seconds." "", (time.time() - start_time)) + logger.info("Processing time for XSVS took %s seconds.", (time.time() - start_time)) elapsed_time = time.time() - start_time # print (Num) print("Total time: %.2f min" % (elapsed_time / 60.0)) @@ -278,7 +274,17 @@ def xsvs( def _process( - num_roi, level, buf_no, buf, img_per_level, labels, max_cts, bin_edges, prob_k, prob_k_pow, track_bad_level + num_roi, + level, + buf_no, + buf, + img_per_level, + labels, + max_cts, + bin_edges, + prob_k, + prob_k_pow, + track_bad_level, ): """ Internal helper function. This modifies inputs in place. 
@@ -322,7 +328,9 @@ def _process( roi_data = data[labels == label] spe_hist, bin_edges = np.histogram(roi_data, bins=bin_edges, density=True) spe_hist = np.nan_to_num(spe_hist) - prob_k[level, j] += (spe_hist - prob_k[level, j]) / (img_per_level[level] - track_bad_level[level]) + prob_k[level, j] += (spe_hist - prob_k[level, j]) / ( + img_per_level[level] - track_bad_level[level] + ) prob_k_pow[level, j] += (np.power(spe_hist, 2) - prob_k_pow[level, j]) / ( img_per_level[level] - track_bad_level[level] @@ -407,7 +415,6 @@ def get_bin_edges(num_times, num_rois, mean_roi, max_cts): ##for fit ################### -from scipy import stats from scipy.special import gamma, gammaln @@ -415,8 +422,8 @@ def gammaDist(x, params): """Gamma distribution function M,K = params, where K is average photon counts , M is the number of coherent modes, - In case of high intensity, the beam behavors like wave and - the probability density of photon, P(x), satify this gamma function. + In case of high intensity, the beam behaves like a wave and + the probability density of photon, P(x), satisfies this gamma function. """ K, M = params @@ -499,8 +506,8 @@ def nbinom_dist(bin_values, K, M): def poisson(x, K): """Poisson distribution function. K is average photon counts - In case of low intensity, the beam behavors like particle and - the probability density of photon, P(x), satify this poisson function. + In case of low intensity, the beam behaves like a particle and + the probability density of photon, P(x), satisfies this poisson function. 
""" K = float(K) Pk = np.exp(-K) * power(K, x) / gamma(x + 1) @@ -566,9 +573,9 @@ def diff_mot_con_factor(times, relaxation_rate, contrast_factor, cf_baseline=0): negative_binom_distribution() function Notes """ - co_eff = (np.exp(-2 * relaxation_rate * times) - 1 + 2 * relaxation_rate * times) / ( - 2 * (relaxation_rate * times) ** 2 - ) + co_eff = ( + np.exp(-2 * relaxation_rate * times) - 1 + 2 * relaxation_rate * times + ) / (2 * (relaxation_rate * times) ** 2) return contrast_factor * co_eff + cf_baseline @@ -582,8 +589,15 @@ def get_roi(data, threshold=1e-3): return roi[0] -def plot_sxvs(Knorm_bin_edges, spe_cts_all, uid=None, q_ring_center=None, xlim=[0, 3.5], time_steps=None): - """a convinent function to plot sxvs results""" +def plot_sxvs( + Knorm_bin_edges, + spe_cts_all, + uid=None, + q_ring_center=None, + xlim=[0, 3.5], + time_steps=None, +): + """a convenient function to plot sxvs results""" num_rings = spe_cts_all.shape[1] num_times = Knorm_bin_edges.shape[0] sx = int(round(np.sqrt(num_rings))) @@ -604,7 +618,10 @@ def plot_sxvs(Knorm_bin_edges, spe_cts_all, uid=None, q_ring_center=None, xlim=[ axes.set_xlabel("K/") axes.set_ylabel("P(K)") (art,) = axes.plot( - Knorm_bin_edges[j, i][:-1], spe_cts_all[j, i], "-o", label=str(time_steps[j]) + " ms" + Knorm_bin_edges[j, i][:-1], + spe_cts_all[j, i], + "-o", + label=str(time_steps[j]) + " ms", ) axes.set_xlim(xlim) axes.set_title("Q " + "%.4f " % (q_ring_center[i]) + r"$\AA^{-1}$") @@ -626,7 +643,7 @@ def fit_xsvs1( ylim=None, time_steps=None, ): - """a convinent function to plot sxvs results + """a convenient function to plot sxvs results supporting fit function include: 'bn': Negative Binomaial Distribution 'gm': Gamma Distribution @@ -634,18 +651,17 @@ def fit_xsvs1( """ from lmfit import Model - from scipy.interpolate import UnivariateSpline if func == "bn": mod = Model(nbinom_dist) elif func == "gm": - mod = Model(gamma_dist, indepdent_vars=["K"]) + mod = Model(gamma_dist, independent_vars=["K"]) 
elif func == "ps": mod = Model(poisson_dist) else: print("the current supporting function include 'bn', 'gm','ps'") - # g_mod = Model(gamma_dist, indepdent_vars=['K']) + # g_mod = Model(gamma_dist, independent_vars=['K']) # g_mod = Model( gamma_dist ) # n_mod = Model(nbinom_dist) # p_mod = Model(poisson_dist) @@ -662,7 +678,11 @@ def fit_xsvs1( else: sy = int(num_rings / sx + 1) fig = plt.figure(figsize=(10, 6)) - plt.title("uid= %s" % uid + " Fitting with Negative Binomial Function", fontsize=20, y=1.02) + plt.title( + "uid= %s" % uid + " Fitting with Negative Binomial Function", + fontsize=20, + y=1.02, + ) plt.axes(frameon=False) plt.xticks([]) plt.yticks([]) @@ -681,14 +701,24 @@ def fit_xsvs1( # print ( rois ) if func == "bn": - result = mod.fit(spe_cts_all[j, i][rois], bin_values=bin_edges[j, i][:-1][rois], K=5 * 2**j, M=12) + result = mod.fit( + spe_cts_all[j, i][rois], + bin_values=bin_edges[j, i][:-1][rois], + K=5 * 2**j, + M=12, + ) elif func == "gm": result = mod.fit( - spe_cts_all[j, i][rois], bin_values=bin_edges[j, i][:-1][rois], K=K_mean[i] * 2**j, M=20 + spe_cts_all[j, i][rois], + bin_values=bin_edges[j, i][:-1][rois], + K=K_mean[i] * 2**j, + M=20, ) elif func == "ps": result = mod.fit( - spe_cts_all[j, i][rois], bin_values=bin_edges[j, i][:-1][rois], K=K_mean[i] * 2**j + spe_cts_all[j, i][rois], + bin_values=bin_edges[j, i][:-1][rois], + K=K_mean[i] * 2**j, ) else: pass @@ -711,9 +741,13 @@ def fit_xsvs1( fitx_ = np.linspace(0, max(Knorm_bin_edges[j, i][:-1]), 1000) fitx = np.linspace(0, max(bin_edges[j, i][:-1]), 1000) if func == "bn": - fity = nbinom_dist(fitx, K_val[i][j], M_val[i][j]) # M and K are fitted best values + fity = nbinom_dist( + fitx, K_val[i][j], M_val[i][j] + ) # M and K are fitted best values label = "nbinom" - txt = "K=" + "%.3f" % (K_val[i][0]) + "," + "M=" + "%.3f" % (M_val[i][0]) + txt = ( + "K=" + "%.3f" % (K_val[i][0]) + "," + "M=" + "%.3f" % (M_val[i][0]) + ) elif func == "gm": fity = gamma_dist(fitx, K_mean[i] * 2**j, 
M_val[i][j]) label = "gamma" @@ -732,7 +766,10 @@ def fit_xsvs1( if i == 0: (art,) = axes.plot( - Knorm_bin_edges[j, i][:-1], spe_cts_all[j, i], "o", label=str(time_steps[j]) + " ms" + Knorm_bin_edges[j, i][:-1], + spe_cts_all[j, i], + "o", + label=str(time_steps[j]) + " ms", ) else: (art,) = axes.plot( @@ -896,7 +933,16 @@ def nbinomres(p, hist, x, N): return err -def get_xsvs_fit(spe_cts_all, K_mean, varyK=True, max_bins=None, qth=None, g2=None, times=None, taus=None): +def get_xsvs_fit( + spe_cts_all, + K_mean, + varyK=True, + max_bins=None, + qth=None, + g2=None, + times=None, + taus=None, +): """ Fit the xsvs by Negative Binomial Function using max-likelihood chi-squares """ @@ -929,7 +975,11 @@ def get_xsvs_fit(spe_cts_all, K_mean, varyK=True, max_bins=None, qth=None, g2=No mi_g2 = 1 / (g2c[:, i] - 1) m_ = np.interp(times, taus, mi_g2) for j in range(num_times): - x_, x, y = bin_edges[j, i][:-1], Knorm_bin_edges[j, i][:-1], spe_cts_all[j, i] + x_, x, y = ( + bin_edges[j, i][:-1], + Knorm_bin_edges[j, i][:-1], + spe_cts_all[j, i], + ) if g2 is not None: m0 = m_[j] else: @@ -985,7 +1035,11 @@ def plot_xsvs_fit( fontsize=3, ): fig = plt.figure(figsize=(9, 6)) - plt.title("uid= %s" % uid + " Fitting with Negative Binomial Function", fontsize=20, y=1.02) + plt.title( + "uid= %s" % uid + " Fitting with Negative Binomial Function", + fontsize=20, + y=1.02, + ) plt.axes(frameon=False) plt.xticks([]) plt.yticks([]) @@ -1019,7 +1073,11 @@ def plot_xsvs_fit( n += 1 for j in range(num_times): # print( i, j ) - x_, x, y = bin_edges[j, i][:-1], Knorm_bin_edges[j, i][:-1], spe_cts_all[j, i] + x_, x, y = ( + bin_edges[j, i][:-1], + Knorm_bin_edges[j, i][:-1], + spe_cts_all[j, i], + ) # Using the best K and M values interpolate and get more values for fitting curve xscale = bin_edges[j, i][:-1][1] / Knorm_bin_edges[j, i][:-1][1] @@ -1087,11 +1145,18 @@ def get_max_countc(FD, labeled_array): if labeled_array.shape != (FD.md["ncols"], FD.md["nrows"]): raise ValueError( " 
`image` shape (%d, %d) in FD is not equal to the labeled_array shape (%d, %d)" - % (FD.md["ncols"], FD.md["nrows"], labeled_array.shape[0], labeled_array.shape[1]) + % ( + FD.md["ncols"], + FD.md["nrows"], + labeled_array.shape[0], + labeled_array.shape[1], + ) ) max_inten = 0 - for i in tqdm(range(FD.beg, FD.end, 1), desc="Get max intensity of ROIs in all frames"): + for i in tqdm( + range(FD.beg, FD.end, 1), desc="Get max intensity of ROIs in all frames" + ): (p, v) = FD.rdrawframe(i) w = np.where(timg[p])[0] @@ -1108,7 +1173,16 @@ def get_contrast(ML_val): return contrast_factorL -def plot_g2_contrast(contrast_factorL, g2, times, taus, q_ring_center=None, uid=None, vlim=[0.8, 1.2], qth=None): +def plot_g2_contrast( + contrast_factorL, + g2, + times, + taus, + q_ring_center=None, + uid=None, + vlim=[0.8, 1.2], + qth=None, +): nq, nt = contrast_factorL.shape if qth is not None: @@ -1125,7 +1199,9 @@ def plot_g2_contrast(contrast_factorL, g2, times, taus, q_ring_center=None, uid= # fig = plt.figure(figsize=(14, 10)) fig = plt.figure() - plt.title("uid= %s_" % uid + "Contrast Factor for Each Q Rings", fontsize=14, y=1.08) + plt.title( + "uid= %s_" % uid + "Contrast Factor for Each Q Rings", fontsize=14, y=1.08 + ) if qth is None: plt.axis("off") n = 1 diff --git a/pyCHX/chx_specklecp.py b/pyCHX/chx_specklecp.py index d03ea3b..324e5bf 100644 --- a/pyCHX/chx_specklecp.py +++ b/pyCHX/chx_specklecp.py @@ -8,31 +8,23 @@ from __future__ import absolute_import, division, print_function import logging -import time -import six from skbeam.core import roi from skbeam.core.utils import bin_edges_to_centers, geometric_series logger = logging.getLogger(__name__) -import itertools import os -import sys from datetime import datetime from multiprocessing import Pool -import dill -import matplotlib as mpl import matplotlib.pyplot as plt import numpy as np -import scipy as sp import scipy.stats as st -from matplotlib.colors import LogNorm -from scipy.optimize import leastsq, minimize 
+from scipy.optimize import leastsq from tqdm import tqdm -from pyCHX.chx_compress import apply_async, go_through_FD, map_async, pass_FD, run_dill_encoded +from pyCHX.chx_compress import apply_async, pass_FD from pyCHX.chx_generic_functions import trans_data_to_pd @@ -151,11 +143,20 @@ def xsvsp_single( number_of_img = noframes for i in range(FD.beg, FD.end): pass_FD(FD, i) - label_arrays = [np.array(label_array == i, dtype=np.int64) for i in np.unique(label_array)[1:]] + label_arrays = [ + np.array(label_array == i, dtype=np.int64) for i in np.unique(label_array)[1:] + ] qind, pixelist = roi.extract_label_indices(label_array) if norm is not None: norms = [ - norm[np.in1d(pixelist, extract_label_indices(np.array(label_array == i, dtype=np.int64))[1])] + norm[ + np.in1d( + pixelist, + extract_label_indices(np.array(label_array == i, dtype=np.int64))[ + 1 + ], + ) + ] for i in np.unique(label_array)[1:] ] @@ -349,7 +350,7 @@ def xsvsc_single( norm=None, progress_bar=True, ): - """YG MOD@Octo 12, 2017, Change photon statistic error bar from sampling statistic bar to error bar with phisical meaning, + """YG MOD@Octo 12, 2017, Change photon statistic error bar from sampling statistic bar to error bar with physical meaning, photon_number@one_particular_count = photon_tolal_number * photon_distribution@one_particular_count +/- sqrt( photon_number@one_particular_count ) @@ -398,8 +399,8 @@ def xsvsc_single( C. Carona and A. Fluerasu , "Photon statistics and speckle visibility spectroscopy with partially coherent x-rays" J. Synchrotron Rad., vol 21, p 1288-1295, 2014. - .. [2] R. Bandyopadhyay, A. S. Gittings, S. S. Suh, P.K. Dixon and - D.J. Durian "Speckle-visibilty Spectroscopy: A tool to study + .. [2] R. Bandyopadhyay, A. S. Gittings, S. S. Suh, P.K. Dixon and + D.J. Durian "Speckle-visibility Spectroscopy: A tool to study time-varying dynamics" Rev. Sci. Instrum. vol 76, p 093110, 2005. 
There is an example in https://github.com/scikit-xray/scikit-xray-examples It will demonstrate the use of these functions in this module for @@ -573,7 +574,17 @@ def xsvsc_single( def _process( - num_roi, level, buf_no, buf, img_per_level, labels, max_cts, bin_edges, prob_k, prob_k_pow, track_bad_level + num_roi, + level, + buf_no, + buf, + img_per_level, + labels, + max_cts, + bin_edges, + prob_k, + prob_k_pow, + track_bad_level, ): """ Internal helper function. This modifies inputs in place. @@ -681,7 +692,10 @@ def get_his_std_qi(data_pixel_qi, max_cts=None): bins = np.arange(max_cts) dqn, dqm = data_pixel_qi.shape # get histogram here - H = np.apply_along_axis(np.bincount, 1, np.int_(data_pixel_qi), minlength=max_cts) / dqm + H = ( + np.apply_along_axis(np.bincount, 1, np.int_(data_pixel_qi), minlength=max_cts) + / dqm + ) # do average for different frame his = np.average(H, axis=0) std = np.std(H, axis=0) @@ -712,7 +726,9 @@ def get_his_std(data_pixel, rois, max_cts=None): for qi in range(noqs): pixelist_qi = np.where(qind == qi + 1)[0] # print(qi, max_cts) - bins, his[qi], std[qi], kmean[qi] = get_his_std_qi(data_pixel[:, pixelist_qi], max_cts) + bins, his[qi], std[qi], kmean[qi] = get_his_std_qi( + data_pixel[:, pixelist_qi], max_cts + ) return bins, his, std, kmean @@ -786,7 +802,9 @@ def get_binned_his_std_qi(data_pixel_qi, lag_steps, max_cts=None): i = 0 for lag in lag_steps: data_pixel_qi_ = np.sum(reshape_array(data_pixel_qi, lag), axis=1) - bins[i], his[i], std[i], kmean[i] = get_his_std_qi(data_pixel_qi_, max_cts * lag) + bins[i], his[i], std[i], kmean[i] = get_his_std_qi( + data_pixel_qi_, max_cts * lag + ) i += 1 return bins, his, std, kmean @@ -875,7 +893,6 @@ def get_bin_edges(num_times, num_rois, mean_roi, max_cts): ##for fit ################### -from scipy import stats from scipy.special import gamma, gammaln ###########################3 @@ -946,9 +963,9 @@ def nbinomlog1(p, hist, x, N, mu): Vary M (shape param) but mu (count rate) fixed (using 
leastsq) p: fitting parameter, in this case is M, coherent mode number - hist: histogram of photon count for each bin (is a number not probablity) + hist: histogram of photon count for each bin (is a number not probability) x: photon count - N: total photons count in the statistics, ( probablity = hist / N ) + N: total photons count in the statistics, ( probability = hist / N ) mu: average photon count for each bin """ @@ -1042,7 +1059,11 @@ def get_xsvs_fit( kmean_guess = K_mean[j, i] N = spec_sum[j, i] if spec_bins is None: - x_, x, y = bin_edges[j, i][:-1], Knorm_bin_edges[j, i][:-1], spe_cts_all[j, i] + x_, x, y = ( + bin_edges[j, i][:-1], + Knorm_bin_edges[j, i][:-1], + spe_cts_all[j, i], + ) else: x_, x, y = bin_edges[j], bin_edges[j] / kmean_guess, spe_cts_all[j, i] @@ -1192,12 +1213,22 @@ def plot_xsvs_fit( L = len(spe_cts_all[j, i]) if spec_bins is None: max_cts_ = max_cts * 2**j - x_, x, y = bin_edges[j, i][:L], Knorm_bin_edges[j, i][:L], spe_cts_all[j, i] - xscale = (x_ / x)[1] # bin_edges[j, i][:-1][1]/ Knorm_bin_edges[j, i][:-1][1] + x_, x, y = ( + bin_edges[j, i][:L], + Knorm_bin_edges[j, i][:L], + spe_cts_all[j, i], + ) + xscale = (x_ / x)[ + 1 + ] # bin_edges[j, i][:-1][1]/ Knorm_bin_edges[j, i][:-1][1] # print( xscale ) else: max_cts_ = max_cts * lag_steps[j] - x_, x, y = bin_edges[j][:L], bin_edges[j][:L] / kmean_guess, spe_cts_all[j, i] + x_, x, y = ( + bin_edges[j][:L], + bin_edges[j][:L] / kmean_guess, + spe_cts_all[j, i], + ) xscale = kmean_guess # Using the best K and M values interpolate and get more values for fitting curve @@ -1238,7 +1269,11 @@ def plot_xsvs_fit( # if j == 0: if j < 2: label = "nbinom_L" - txts = r"$M=%s$" % round(ML_val[i][j], 2) + "," + r"$K=%s$" % round(KL_val[i][j], 2) + txts = ( + r"$M=%s$" % round(ML_val[i][j], 2) + + "," + + r"$K=%s$" % round(KL_val[i][j], 2) + ) # print( ML_val[i] ) x = 0.05 y0 = 0.2 - j * 0.1 @@ -1246,7 +1281,9 @@ def plot_xsvs_fit( fontsize_ = fontsize * 2 else: fontsize_ = 18 - axes.text(x=x, 
y=y0, s=txts, fontsize=fontsize_, transform=axes.transAxes) + axes.text( + x=x, y=y0, s=txts, fontsize=fontsize_, transform=axes.transAxes + ) else: label = "" (art,) = axes.plot(fitx_, fitL, "-r", label=label) @@ -1317,14 +1354,22 @@ def save_KM(K_mean, KL_val, ML_val, qs=None, level_time=None, uid=None, path=Non + ["M_Fit_%s" % s for s in level_time] + ["Contrast_Fit_%s" % s for s in level_time] ) - data = np.hstack([(K_mean).T, kl.reshape(L, n), ml.reshape(L, n), (1 / ml).reshape(L, n)]) + data = np.hstack( + [(K_mean).T, kl.reshape(L, n), ml.reshape(L, n), (1 / ml).reshape(L, n)] + ) if qs is not None: qs = np.array(qs) l = ["q"] + l # print( (K_mean).T, (K_mean).T.shape ) # print( qs ) data = np.hstack( - [qs.reshape(L, 1), (K_mean).T, kl.reshape(L, n), ml.reshape(L, n), (1 / ml).reshape(L, n)] + [ + qs.reshape(L, 1), + (K_mean).T, + kl.reshape(L, n), + ml.reshape(L, n), + (1 / ml).reshape(L, n), + ] ) df = DataFrame(data) @@ -1355,9 +1400,15 @@ def get_his_std_from_pds(spec_pds, his_shapes=None): spec_std = np.zeros([M, N], dtype=np.object) for i in range(M): for j in range(N): - spec_his[i, j] = np.array(spec_pds[spkeys[1 + i * N + j]][~np.isnan(spec_pds[spkeys[1 + i * N + j]])]) + spec_his[i, j] = np.array( + spec_pds[spkeys[1 + i * N + j]][ + ~np.isnan(spec_pds[spkeys[1 + i * N + j]]) + ] + ) spec_std[i, j] = np.array( - spec_pds[spkeys[1 + 2 * N + i * N + j]][~np.isnan(spec_pds[spkeys[1 + 2 * N + i * N + j]])] + spec_pds[spkeys[1 + 2 * N + i * N + j]][ + ~np.isnan(spec_pds[spkeys[1 + 2 * N + i * N + j]]) + ] ) return spec_his, spec_std @@ -1438,7 +1489,16 @@ def plot_g2_contrast( # plt.show() -def get_xsvs_fit_old(spe_cts_all, K_mean, varyK=True, qth=None, max_bins=2, g2=None, times=None, taus=None): +def get_xsvs_fit_old( + spe_cts_all, + K_mean, + varyK=True, + qth=None, + max_bins=2, + g2=None, + times=None, + taus=None, +): """ Fit the xsvs by Negative Binomial Function using max-likelihood chi-squares """ @@ -1471,7 +1531,11 @@ def 
get_xsvs_fit_old(spe_cts_all, K_mean, varyK=True, qth=None, max_bins=2, g2=N mi_g2 = 1 / (g2c[:, i] - 1) m_ = np.interp(times, taus, mi_g2) for j in range(num_times): - x_, x, y = bin_edges[j, i][:-1], Knorm_bin_edges[j, i][:-1], spe_cts_all[j, i] + x_, x, y = ( + bin_edges[j, i][:-1], + Knorm_bin_edges[j, i][:-1], + spe_cts_all[j, i], + ) if g2 is not None: m0 = m_[j] else: @@ -1521,8 +1585,8 @@ def gammaDist(x, params): """Gamma distribution function M,K = params, where K is average photon counts , M is the number of coherent modes, - In case of high intensity, the beam behavors like wave and - the probability density of photon, P(x), satify this gamma function. + In case of high intensity, the beam behaviors like wave and + the probability density of photon, P(x), satisfy this gamma function. """ K, M = params @@ -1605,8 +1669,8 @@ def nbinom_dist(bin_values, K, M): def poisson(x, K): """Poisson distribution function. K is average photon counts - In case of low intensity, the beam behavors like particle and - the probability density of photon, P(x), satify this poisson function. + In case of low intensity, the beam behaviors like particle and + the probability density of photon, P(x), satisfy this poisson function. 
""" K = float(K) Pk = np.exp(-K) * power(K, x) / gamma(x + 1) @@ -1672,15 +1736,22 @@ def diff_mot_con_factor(times, relaxation_rate, contrast_factor, cf_baseline=0): negative_binom_distribution() function Notes """ - co_eff = (np.exp(-2 * relaxation_rate * times) - 1 + 2 * relaxation_rate * times) / ( - 2 * (relaxation_rate * times) ** 2 - ) + co_eff = ( + np.exp(-2 * relaxation_rate * times) - 1 + 2 * relaxation_rate * times + ) / (2 * (relaxation_rate * times) ** 2) return contrast_factor * co_eff + cf_baseline -def plot_sxvs(Knorm_bin_edges, spe_cts_all, uid=None, q_ring_center=None, xlim=[0, 3.5], time_steps=None): - """a convinent function to plot sxvs results""" +def plot_sxvs( + Knorm_bin_edges, + spe_cts_all, + uid=None, + q_ring_center=None, + xlim=[0, 3.5], + time_steps=None, +): + """a convenient function to plot sxvs results""" num_rings = spe_cts_all.shape[1] num_times = Knorm_bin_edges.shape[0] sx = int(round(np.sqrt(num_rings))) @@ -1701,7 +1772,10 @@ def plot_sxvs(Knorm_bin_edges, spe_cts_all, uid=None, q_ring_center=None, xlim=[ axes.set_xlabel("K/") axes.set_ylabel("P(K)") (art,) = axes.plot( - Knorm_bin_edges[j, i][:-1], spe_cts_all[j, i], "-o", label=str(time_steps[j]) + " ms" + Knorm_bin_edges[j, i][:-1], + spe_cts_all[j, i], + "-o", + label=str(time_steps[j]) + " ms", ) axes.set_xlim(xlim) axes.set_title("Q " + "%.4f " % (q_ring_center[i]) + r"$\AA^{-1}$") @@ -1723,7 +1797,7 @@ def fit_xsvs1( ylim=None, time_steps=None, ): - """a convinent function to plot sxvs results + """a convenient function to plot sxvs results supporting fit function include: 'bn': Negative Binomaial Distribution 'gm': Gamma Distribution @@ -1731,18 +1805,17 @@ def fit_xsvs1( """ from lmfit import Model - from scipy.interpolate import UnivariateSpline if func == "bn": mod = Model(nbinom_dist) elif func == "gm": - mod = Model(gamma_dist, indepdent_vars=["K"]) + mod = Model(gamma_dist, independent_vars=["K"]) elif func == "ps": mod = Model(poisson_dist) else: print("the 
current supporting function include 'bn', 'gm','ps'") - # g_mod = Model(gamma_dist, indepdent_vars=['K']) + # g_mod = Model(gamma_dist, independent_vars=['K']) # g_mod = Model( gamma_dist ) # n_mod = Model(nbinom_dist) # p_mod = Model(poisson_dist) @@ -1759,7 +1832,11 @@ def fit_xsvs1( else: sy = int(num_rings / sx + 1) fig = plt.figure(figsize=(10, 6)) - plt.title("uid= %s" % uid + " Fitting with Negative Binomial Function", fontsize=20, y=1.02) + plt.title( + "uid= %s" % uid + " Fitting with Negative Binomial Function", + fontsize=20, + y=1.02, + ) plt.axes(frameon=False) plt.xticks([]) plt.yticks([]) @@ -1778,14 +1855,24 @@ def fit_xsvs1( # print ( rois ) if func == "bn": - result = mod.fit(spe_cts_all[j, i][rois], bin_values=bin_edges[j, i][:-1][rois], K=5 * 2**j, M=12) + result = mod.fit( + spe_cts_all[j, i][rois], + bin_values=bin_edges[j, i][:-1][rois], + K=5 * 2**j, + M=12, + ) elif func == "gm": result = mod.fit( - spe_cts_all[j, i][rois], bin_values=bin_edges[j, i][:-1][rois], K=K_mean[i] * 2**j, M=20 + spe_cts_all[j, i][rois], + bin_values=bin_edges[j, i][:-1][rois], + K=K_mean[i] * 2**j, + M=20, ) elif func == "ps": result = mod.fit( - spe_cts_all[j, i][rois], bin_values=bin_edges[j, i][:-1][rois], K=K_mean[i] * 2**j + spe_cts_all[j, i][rois], + bin_values=bin_edges[j, i][:-1][rois], + K=K_mean[i] * 2**j, ) else: pass @@ -1808,9 +1895,13 @@ def fit_xsvs1( fitx_ = np.linspace(0, max(Knorm_bin_edges[j, i][:-1]), 1000) fitx = np.linspace(0, max(bin_edges[j, i][:-1]), 1000) if func == "bn": - fity = nbinom_dist(fitx, K_val[i][j], M_val[i][j]) # M and K are fitted best values + fity = nbinom_dist( + fitx, K_val[i][j], M_val[i][j] + ) # M and K are fitted best values label = "nbinom" - txt = "K=" + "%.3f" % (K_val[i][0]) + "," + "M=" + "%.3f" % (M_val[i][0]) + txt = ( + "K=" + "%.3f" % (K_val[i][0]) + "," + "M=" + "%.3f" % (M_val[i][0]) + ) elif func == "gm": fity = gamma_dist(fitx, K_mean[i] * 2**j, M_val[i][j]) label = "gamma" @@ -1829,7 +1920,10 @@ def 
fit_xsvs1( if i == 0: (art,) = axes.plot( - Knorm_bin_edges[j, i][:-1], spe_cts_all[j, i], "o", label=str(time_steps[j]) + " ms" + Knorm_bin_edges[j, i][:-1], + spe_cts_all[j, i], + "o", + label=str(time_steps[j]) + " ms", ) else: (art,) = axes.plot( @@ -1944,7 +2038,15 @@ def plot_xsvs_g2(g2, taus, res_pargs=None, *argv, **kwargs): def get_xsvs_fit_old1( - spe_cts_all, K_mean, spec_std=None, varyK=True, qth=None, max_bins=None, g2=None, times=None, taus=None + spe_cts_all, + K_mean, + spec_std=None, + varyK=True, + qth=None, + max_bins=None, + g2=None, + times=None, + taus=None, ): """ Fit the xsvs by Negative Binomial Function using max-likelihood chi-squares @@ -1978,7 +2080,11 @@ def get_xsvs_fit_old1( mi_g2 = 1 / (g2c[:, i] - 1) m_ = np.interp(times, taus, mi_g2) for j in range(num_times): - x_, x, y = bin_edges[j, i][:-1], Knorm_bin_edges[j, i][:-1], spe_cts_all[j, i] + x_, x, y = ( + bin_edges[j, i][:-1], + Knorm_bin_edges[j, i][:-1], + spe_cts_all[j, i], + ) if spec_std is not None: yerr = spec_std[j, i] else: diff --git a/pyCHX/chx_xpcs_xsvs_jupyter_V1.py b/pyCHX/chx_xpcs_xsvs_jupyter_V1.py index 31ec64e..d301026 100644 --- a/pyCHX/chx_xpcs_xsvs_jupyter_V1.py +++ b/pyCHX/chx_xpcs_xsvs_jupyter_V1.py @@ -19,7 +19,7 @@ def get_t_iqc_uids(uid_list, setup_pargs, slice_num=10, slice_width=1): """Get Iq at different time edge (difined by slice_num and slice_width) for a list of uids Input: uid_list: list of string (uid) - setup_pargs: dict, for caculation of Iq, the key of this dict should include + setup_pargs: dict, for calculation of Iq, the key of this dict should include 'center': beam center 'dpix': pixel size 'lambda_': X-ray wavelength @@ -44,7 +44,9 @@ def get_t_iqc_uids(uid_list, setup_pargs, slice_num=10, slice_width=1): good_start = 5 FD = Multifile(filename, good_start, N) Nimg = FD.end - FD.beg - time_edge = create_time_slice(Nimg, slice_num=slice_num, slice_width=slice_width, edges=None) + time_edge = create_time_slice( + Nimg, slice_num=slice_num, 
slice_width=slice_width, edges=None + ) time_edge = np.array(time_edge) + good_start # print( time_edge ) tstamp[uid] = time_edge[:, 0] * timeperframe @@ -55,7 +57,7 @@ def get_t_iqc_uids(uid_list, setup_pargs, slice_num=10, slice_width=1): def plot_t_iqtMq2(qt, iqst, tstamp, ax=None, perf=""): - """plot q2~Iq at differnt time""" + """plot q2~Iq at different time""" if ax is None: fig, ax = plt.subplots() q = qt @@ -77,7 +79,7 @@ def plot_t_iqtMq2(qt, iqst, tstamp, ax=None, perf=""): def plot_t_iqc_uids(qs, iqsts, tstamps): - """plot q2~Iq at differnt time for a uid list""" + """plot q2~Iq at different time for a uid list""" keys = list(qs.keys()) fig, ax = plt.subplots() for uid in keys: @@ -107,11 +109,11 @@ def plot_entries_from_csvlist( YG June 9, 2017@CHX YG Sep 29, 2017@CHX. - plot enteries for a list csvs + plot entries for a list csvs Input: csv_list: list, a list of uid (string) inDir: string, imported folder for saved analysis results - key: string, plot entry, surport + key: string, plot entry, support 'g2' for one-time, 'iq' for q~iq 'mean_int_sets' for mean intensity of each roi as a function of frame @@ -253,11 +255,11 @@ def plot_entries_from_uids( YG June 9, 2017@CHX YG Sep 29, 2017@CHX. 
- plot enteries for a list uids + plot entries for a list uids Input: uid_list: list, a list of uid (string) inDir: string, imported folder for saved analysis results - key: string, plot entry, surport + key: string, plot entry, support 'g2' for one-time, 'iq' for q~iq 'mean_int_sets' for mean intensity of each roi as a function of frame @@ -309,7 +311,9 @@ def plot_entries_from_uids( filename = "uid=%s_Res.h5" % uid_dict[u] else: filename = filename_list[i] - total_res = extract_xpcs_results_from_h5(filename=filename, import_dir=inDiru, exclude_keys=["g12b"]) + total_res = extract_xpcs_results_from_h5( + filename=filename, import_dir=inDiru, exclude_keys=["g12b"] + ) if key == "g2": d = total_res[key][1:, qth] taus = total_res["taus"][1:] @@ -345,7 +349,6 @@ def plot_entries_from_uids( ) elif key == "iq": - x = total_res["q_saxs"] y = total_res["iq_saxs"] plot1D( @@ -390,10 +393,10 @@ def plot_entries_from_uids( def get_iq_from_uids(uids, mask, setup_pargs): """Y.G. developed July 17, 2017 @CHX - Get q-Iq of a uids dict, each uid could corrrespond one frame or a time seriers + Get q-Iq of a uids dict, each uid could correspond one frame or a time seriers uids: dict, val: meaningful decription, key: a list of uids mask: bool-type 2D array - setup_pargs: dict, at least should contains, the following paramters for calculation of I(q) + setup_pargs: dict, at least should contains, the following parameters for calculation of I(q) 'Ldet': 4917.50495, 'center': [988, 1120], @@ -410,7 +413,6 @@ def get_iq_from_uids(uids, mask, setup_pargs): n = 0 for k in list(uids.keys()): for uid in uids[k]: - uidstr = "uid=%s" % uid sud = get_sid_filenames(db[uid]) # print(sud) @@ -449,7 +451,9 @@ def get_iq_from_uids(uids, mask, setup_pargs): setup_pargs["uid"] = uidstr - qp_saxs, iq_saxs, q_saxs = get_circular_average(avg_img, mask, pargs=setup_pargs, save=True) + qp_saxs, iq_saxs, q_saxs = get_circular_average( + avg_img, mask, pargs=setup_pargs, save=True + ) if n == 0: iqs = 
np.zeros([len(q_saxs), Nuid + 1]) iqs[:, 0] = q_saxs @@ -470,7 +474,14 @@ def get_iq_from_uids(uids, mask, setup_pargs): filename = setup_pargs["filename"] else: filename = "qIq.csv" - pd = save_arrays(iqs, label=label, dtype="array", filename=filename, path=setup_pargs["path"], return_res=True) + pd = save_arrays( + iqs, + label=label, + dtype="array", + filename=filename, + path=setup_pargs["path"], + return_res=True, + ) return pd, img_data @@ -480,8 +491,8 @@ def wait_func(wait_time=2): # print( 'Starting to do something here...') -def wait_data_acquistion_finish(uid, wait_time=2, max_try_num=3): - """check the completion of a data uid acquistion +def wait_data_acquisition_finish(uid, wait_time=2, max_try_num=3): + """check the completion of a data uid acquisition Parameter: uid: wait_time: the waiting step in unit of second @@ -500,14 +511,14 @@ def wait_data_acquistion_finish(uid, wait_time=2, max_try_num=3): try: get_meta_data(uid) FINISH = True - print("The data acquistion finished.") + print("The data acquisition finished.") print("Starting to do something here...") except: wait_func(wait_time=wait_time) w += 1 print("Try number: %s" % w) if w > max_try_num: - print("There could be something going wrong with data acquistion.") + print("There could be something going wrong with data acquisition.") print("Force to terminate after %s tries." % w) FINISH = True Fake_FINISH = False @@ -517,7 +528,7 @@ def wait_data_acquistion_finish(uid, wait_time=2, max_try_num=3): def get_uids_by_range(start_uidth=-1, end_uidth=0): """Y.G. Dec 22, 2016 - A wrap funciton to find uids by giving start and end uid number, i.e. -10, -1 + A wrap function to find uids by giving start and end uid number, i.e. -10, -1 Return: uids: list, uid with 8 character length fuids: list, uid with full length @@ -540,7 +551,7 @@ def get_uids_by_range(start_uidth=-1, end_uidth=0): def get_uids_in_time_period(start_time, stop_time): """Y.G. 
Dec 22, 2016 - A wrap funciton to find uids by giving start and end time + A wrap function to find uids by giving start and end time Return: uids: list, uid with 8 character length fuids: list, uid with full length @@ -561,7 +572,9 @@ def get_uids_in_time_period(start_time, stop_time): return np.array(uids), np.array(fuids) -def do_compress_on_line(start_time, stop_time, mask_dict=None, mask=None, wait_time=2, max_try_num=3): +def do_compress_on_line( + start_time, stop_time, mask_dict=None, mask=None, wait_time=2, max_try_num=3 +): """Y.G. Mar 10, 2017 Do on-line compress by giving start time and stop time Parameters: @@ -580,7 +593,7 @@ def do_compress_on_line(start_time, stop_time, mask_dict=None, mask=None, wait_t print("*" * 50) print("Do compress for %s now..." % uid) if db[uid]["start"]["plan_name"] == "count": - finish = wait_data_acquistion_finish(uid, wait_time, max_try_num) + finish = wait_data_acquisition_finish(uid, wait_time, max_try_num) if finish: try: md = get_meta_data(uid) @@ -593,7 +606,11 @@ def do_compress_on_line(start_time, stop_time, mask_dict=None, mask=None, wait_t bin_frame_number=1, ) - update_olog_uid(uid=md["uid"], text="Data are on-line sparsified!", attachments=None) + update_olog_uid( + uid=md["uid"], + text="Data are on-line sparsified!", + attachments=None, + ) except: print("There are something wrong with this data: %s..." % uid) print("*" * 50) @@ -601,7 +618,14 @@ def do_compress_on_line(start_time, stop_time, mask_dict=None, mask=None, wait_t def realtime_xpcs_analysis( - start_time, stop_time, run_pargs, md_update=None, wait_time=2, max_try_num=3, emulation=False, clear_plot=False + start_time, + stop_time, + run_pargs, + md_update=None, + wait_time=2, + max_try_num=3, + emulation=False, + clear_plot=False, ): """Y.G. Mar 10, 2017 Do on-line xpcs by giving start time and stop time @@ -627,13 +651,16 @@ def realtime_xpcs_analysis( print("*" * 50) # print('Do compress for %s now...'%uid) print("Starting analysis for %s now..." 
% uid) - if db[uid]["start"]["plan_name"] == "count" or db[uid]["start"]["plan_name"] == "manual_count": + if ( + db[uid]["start"]["plan_name"] == "count" + or db[uid]["start"]["plan_name"] == "manual_count" + ): # if db[uid]['start']['dtype'] =='xpcs': - finish = wait_data_acquistion_finish(uid, wait_time, max_try_num) + finish = wait_data_acquisition_finish(uid, wait_time, max_try_num) if finish: try: md = get_meta_data(uid) - ##corect some metadata + ##correct some metadata if md_update is not None: md.update(md_update) # if 'username' in list(md.keys()): @@ -646,13 +673,17 @@ def realtime_xpcs_analysis( if not emulation: # suid=uid[:6] run_xpcs_xsvs_single( - uid, run_pargs=run_pargs, md_cor=None, return_res=False, clear_plot=clear_plot + uid, + run_pargs=run_pargs, + md_cor=None, + return_res=False, + clear_plot=clear_plot, ) # update_olog_uid( uid= md['uid'], text='Data are on-line sparsified!',attachments=None) except: print("There are something wrong with this data: %s..." % uid) else: - print("\nThis is not a XPCS series. We will simiply ignore it.") + print("\nThis is not a XPCS series. 
We will simply ignore it.") print("*" * 50) # print( 'Sleep 10 sec here!!!') @@ -679,7 +710,7 @@ def compress_multi_uids( Parameters: uids: list, a list of uid mask: bool array, mask array - force_compress: default is False, just load the compresssed data; + force_compress: default is False, just load the compressed data; if True, will compress it to overwrite the old compressed data para_compress: apply the parallel compress algorithm bin_frame_number: @@ -736,11 +767,11 @@ def compress_multi_uids( #################################################################################################### -##get_two_time_mulit_uids, sequential cal for uids, but apply parallel for each uid ## +##get_two_time_multi_uids, sequential cal for uids, but apply parallel for each uid ## ################################################################################################# -def get_two_time_mulit_uids( +def get_two_time_multi_uids( uids, roi_mask, norm=None, @@ -753,20 +784,20 @@ def get_two_time_mulit_uids( compress_path=None, ): """Calculate two time correlation by using auto_two_Arrayc func for a set of uids, - if the two-time resutls are already created, by default (force_generate=False), just pass + if the two-time results are already created, by default (force_generate=False), just pass Parameters: uids: list, a list of uid roi_mask: bool array, roi mask array norm: the normalization array path: string, where to save the two time - force_generate: default, False, if the two-time resutls are already created, just pass + force_generate: default, False, if the two-time results are already created, just pass if True, will force to calculate two-time no matter exist or not Return: None, save the two-time in as path + uid + 'uid=%s_g12b'%uid e.g., - get_two_time_mulit_uids( guids, roi_mask, norm= norm,bin_frame_number=1, + get_two_time_multi_uids( guids, roi_mask, norm= norm,bin_frame_number=1, path= data_dir,force_generate=False ) """ @@ -786,7 +817,10 @@ def 
get_two_time_mulit_uids( if bin_frame_number == 1: filename = "%s" % compress_path + "uid_%s.cmp" % md["uid"] else: - filename = "%s" % compress_path + "uid_%s_bined--%s.cmp" % (md["uid"], bin_frame_number) + filename = "%s" % compress_path + "uid_%s_bined--%s.cmp" % ( + md["uid"], + bin_frame_number, + ) FD = Multifile(filename, 0, N // bin_frame_number) # print( FD.beg, FD.end) @@ -797,17 +831,30 @@ def get_two_time_mulit_uids( if not force_generate: if os.path.exists(filename + ".npy"): doit = False - print("The two time correlation function for uid=%s is already calculated. Just pass..." % uid) + print( + "The two time correlation function for uid=%s is already calculated. Just pass..." + % uid + ) if doit: data_pixel = Get_Pixel_Arrayc(FD, pixelist, norm=norm).get_data() g12b = auto_two_Arrayc(data_pixel, roi_mask, index=None) np.save(filename, g12b) del g12b - print("The two time correlation function for uid={} is saved as {}.".format(uid, filename)) + print( + "The two time correlation function for uid={} is saved as {}.".format( + uid, filename + ) + ) def get_series_g2_from_g12( - g12b, fra_num_by_dose=None, dose_label=None, good_start=0, log_taus=True, num_bufs=8, time_step=1 + g12b, + fra_num_by_dose=None, + dose_label=None, + good_start=0, + log_taus=True, + num_bufs=8, + time_step=1, ): """ Get a series of one-time function from two-time by giving noframes @@ -819,7 +866,7 @@ def get_series_g2_from_g12( will use g12b length to replace this number by default is None, will = [ g12b.shape[0] ] dose_label: the label of each dose, also is the keys of returned g2, lag - log_taus: if true, will only return a g2 with the correponding tau values + log_taus: if true, will only return a g2 with the corresponding tau values as calculated by multi-tau defined taus Return: @@ -841,18 +888,22 @@ def get_series_g2_from_g12( # print( good_end ) if good_end > L: warnings.warn( - "Warning: the dose value is too large, and please check the maxium dose in this data set and 
give a smaller dose value. We will use the maxium dose of the data." + "Warning: the dose value is too large, and please check the maximum dose in this data set and give a smaller dose value. We will use the maximum dose of the data." ) good_end = L if not log_taus: - g2[key] = get_one_time_from_two_time(g12b[good_start:good_end, good_start:good_end, :]) + g2[key] = get_one_time_from_two_time( + g12b[good_start:good_end, good_start:good_end, :] + ) else: # print( good_end, num_bufs ) lag_step = get_multi_tau_lag_steps(good_end, num_bufs) lag_step = lag_step[lag_step < good_end - good_start] # print( len(lag_steps ) ) lag_steps[key] = lag_step * time_step - g2[key] = get_one_time_from_two_time(g12b[good_start:good_end, good_start:good_end, :])[lag_step] + g2[key] = get_one_time_from_two_time( + g12b[good_start:good_end, good_start:good_end, :] + )[lag_step] return lag_steps, g2 @@ -861,10 +912,10 @@ def get_fra_num_by_dose(exp_dose, exp_time, att=1, dead_time=2): """ Calculate the frame number to be correlated by giving a X-ray exposure dose - Paramters: + Parameters: exp_dose: a list, the exposed dose, e.g., in unit of exp_time(ms)*N(fram num)*att( attenuation) exp_time: float, the exposure time for a xpcs time sereies - dead_time: dead time for the fast shutter reponse time, CHX = 2ms + dead_time: dead time for the fast shutter response time, CHX = 2ms Return: noframes: the frame number to be correlated, exp_dose/( exp_time + dead_time ) e.g., @@ -877,7 +928,7 @@ def get_fra_num_by_dose(exp_dose, exp_time, att=1, dead_time=2): return np.int_(np.array(exp_dose) / (exp_time + dead_time) / att) -def get_series_one_time_mulit_uids( +def get_series_one_time_multi_uids( uids, qval_dict, trans=None, @@ -891,7 +942,7 @@ def get_series_one_time_mulit_uids( imgs=None, direct_load_data=False, ): - """Calculate a dose depedent series of one time correlations from two time + """Calculate a dose dependent series of one time correlations from two time Parameters: uids: list, a 
list of uid trans: list, same length as uids, the transmission list @@ -910,7 +961,9 @@ def get_series_one_time_mulit_uids( """ if path is None: - print("Please calculate two time function first by using get_two_time_mulit_uids function.") + print( + "Please calculate two time function first by using get_two_time_multi_uids function." + ) else: taus_uids = {} g2_uids = {} @@ -962,7 +1015,10 @@ def get_series_one_time_mulit_uids( else: transi = trans[i] fra_num_by_dose = get_fra_num_by_dose( - exp_dose=exposure_dose, exp_time=exp_time, dead_time=dead_time, att=transi + exp_dose=exposure_dose, + exp_time=exp_time, + dead_time=dead_time, + att=transi, ) print("uid: %s--> fra_num_by_dose: %s" % (uid, fra_num_by_dose)) @@ -1093,20 +1149,22 @@ def plot_dose_g2( # return taus_dict, g2_dict -def run_xpcs_xsvs_single(uid, run_pargs, md_cor=None, return_res=False, reverse=True, clear_plot=False): +def run_xpcs_xsvs_single( + uid, run_pargs, md_cor=None, return_res=False, reverse=True, clear_plot=False +): """Y.G. Dec 22, 2016 Run XPCS XSVS analysis for a single uid Parameters: uid: unique id run_pargs: dict, control run type and setup parameters, such as q range et.al. - reverse:,True, revserse the image upside down + reverse:,True, reverse the image upside down Return: save analysis result to csv/png/h5 files return_res: if true, return a dict, containing g2,g4,g12,contrast et.al. 
depending on the run type An example for the run_pargs: run_pargs= dict( - scat_geometry = 'gi_saxs' #suport 'saxs', 'gi_saxs', 'ang_saxs' (for anisotropics saxs or flow-xpcs) + scat_geometry = 'gi_saxs' #support 'saxs', 'gi_saxs', 'ang_saxs' (for anisotropics saxs or flow-xpcs) force_compress = True,#False, para_compress = True, run_fit_form = False, @@ -1219,7 +1277,12 @@ def run_xpcs_xsvs_single(uid, run_pargs, md_cor=None, return_res=False, reverse= if scat_geometry == "gi_saxs": refl_x0 = run_pargs["refl_x0"] refl_y0 = run_pargs["refl_y0"] - Qr, Qz, qr_map, qz_map = run_pargs["Qr"], run_pargs["Qz"], run_pargs["qr_map"], run_pargs["qz_map"] + Qr, Qz, qr_map, qz_map = ( + run_pargs["Qr"], + run_pargs["Qz"], + run_pargs["qr_map"], + run_pargs["qz_map"], + ) taus = None g2 = None @@ -1269,7 +1332,9 @@ def run_xpcs_xsvs_single(uid, run_pargs, md_cor=None, return_res=False, reverse= if md["detector"] == "eiger1m_single_image": Chip_Mask = np.load("/XF11ID/analysis/2017_1/masks/Eiger1M_Chip_Mask.npy") elif md["detector"] == "eiger4m_single_image" or md["detector"] == "image": - Chip_Mask = np.array(np.load("/XF11ID/analysis/2017_1/masks/Eiger4M_chip_mask.npy"), dtype=bool) + Chip_Mask = np.array( + np.load("/XF11ID/analysis/2017_1/masks/Eiger4M_chip_mask.npy"), dtype=bool + ) BadPix = np.load("/XF11ID/analysis/2018_1/BadPix_4M.npy") Chip_Mask.ravel()[BadPix] = 0 elif md["detector"] == "eiger500K_single_image": @@ -1278,7 +1343,10 @@ def run_xpcs_xsvs_single(uid, run_pargs, md_cor=None, return_res=False, reverse= Chip_Mask = 1 # show_img(Chip_Mask) - center = [int(md["beam_center_y"]), int(md["beam_center_x"])] # beam center [y,x] for python image + center = [ + int(md["beam_center_y"]), + int(md["beam_center_x"]), + ] # beam center [y,x] for python image pixel_mask = 1 - np.int_(np.array(imgs.md["pixel_mask"], dtype=bool)) print("The data are: %s" % imgs) @@ -1323,7 +1391,9 @@ def run_xpcs_xsvs_single(uid, run_pargs, md_cor=None, return_res=False, reverse= ) # 
print_dict( setup_pargs ) - mask = load_mask(mask_path, mask_name, plot_=False, image_name=uidstr + "_mask", reverse=reverse) + mask = load_mask( + mask_path, mask_name, plot_=False, image_name=uidstr + "_mask", reverse=reverse + ) mask *= pixel_mask if md["detector"] == "eiger4m_single_image": mask[:, 2069] = 0 # False #Concluded from the previous results @@ -1366,7 +1436,9 @@ def run_xpcs_xsvs_single(uid, run_pargs, md_cor=None, return_res=False, reverse= photon_occ = len(np.where(avg_img)[0]) / (imgsa[0].size) # compress = photon_occ < .4 #if the photon ocupation < 0.5, do compress print("The non-zeros photon occupation is %s." % (photon_occ)) - print("Will " + "Always " + ["NOT", "DO"][compress] + " apply compress process.") + print( + "Will " + "Always " + ["NOT", "DO"][compress] + " apply compress process." + ) # good_start = 5 #make the good_start at least 0 t0 = time.time() filename = "/XF11ID/analysis/Compressed_Data" + "/uid_%s.cmp" % md["uid"] @@ -1391,7 +1463,15 @@ def run_xpcs_xsvs_single(uid, run_pargs, md_cor=None, return_res=False, reverse= uid_ = uidstr + "_fra_%s_%s" % (FD.beg, FD.end) print(uid_) plot1D( - y=imgsum[np.array([i for i in np.arange(good_start, len(imgsum)) if i not in bad_frame_list])], + y=imgsum[ + np.array( + [ + i + for i in np.arange(good_start, len(imgsum)) + if i not in bad_frame_list + ] + ) + ], title=uidstr + "_imgsum", xlabel="Frame", ylabel="Total_Intensity", @@ -1402,7 +1482,7 @@ def run_xpcs_xsvs_single(uid, run_pargs, md_cor=None, return_res=False, reverse= mask = mask * Chip_Mask # %system free && sync && echo 3 > /proc/sys/vm/drop_caches && free - ## Get bad frame list by a polynominal fit + ## Get bad frame list by a polynomial fit bad_frame_list = get_bad_frame_list( imgsum, fit=True, @@ -1432,10 +1512,15 @@ def run_xpcs_xsvs_single(uid, run_pargs, md_cor=None, return_res=False, reverse= cmap=cmap_albula, ) - imgsum_y = imgsum[np.array([i for i in np.arange(len(imgsum)) if i not in bad_frame_list])] + imgsum_y = 
imgsum[ + np.array([i for i in np.arange(len(imgsum)) if i not in bad_frame_list]) + ] imgsum_x = np.arange(len(imgsum_y)) save_lists( - [imgsum_x, imgsum_y], label=["Frame", "Total_Intensity"], filename=uidstr + "_img_sum_t", path=data_dir + [imgsum_x, imgsum_y], + label=["Frame", "Total_Intensity"], + filename=uidstr + "_img_sum_t", + path=data_dir, ) plot1D( y=imgsum_y, @@ -1449,7 +1534,6 @@ def run_xpcs_xsvs_single(uid, run_pargs, md_cor=None, return_res=False, reverse= ############for SAXS and ANG_SAXS (Flow_SAXS) if scat_geometry == "saxs" or scat_geometry == "ang_saxs": - # show_saxs_qmap( avg_img, setup_pargs, width=600, vmin=.1, vmax=np.max(avg_img*.1), logs=True, # image_name= uidstr + '_img_avg', save=True) # np.save( data_dir + 'uid=%s--img-avg'%uid, avg_img) @@ -1460,7 +1544,10 @@ def run_xpcs_xsvs_single(uid, run_pargs, md_cor=None, return_res=False, reverse= # hmask=1 hmask = 1 qp_saxs, iq_saxs, q_saxs = get_circular_average( - avg_img * Chip_Mask, mask * hmask * Chip_Mask, pargs=setup_pargs, save=True + avg_img * Chip_Mask, + mask * hmask * Chip_Mask, + pargs=setup_pargs, + save=True, ) plot_circular_average( @@ -1520,10 +1607,14 @@ def run_xpcs_xsvs_single(uid, run_pargs, md_cor=None, return_res=False, reverse= if scat_geometry != "ang_saxs": Nimg = FD.end - FD.beg - time_edge = create_time_slice(N=Nimg, slice_num=3, slice_width=1, edges=None) + time_edge = create_time_slice( + N=Nimg, slice_num=3, slice_width=1, edges=None + ) time_edge = np.array(time_edge) + good_start # print( time_edge ) - qpt, iqst, qt = get_t_iqc(FD, time_edge, mask * Chip_Mask, pargs=setup_pargs, nx=1500) + qpt, iqst, qt = get_t_iqc( + FD, time_edge, mask * Chip_Mask, pargs=setup_pargs, nx=1500 + ) plot_t_iqc( qt, iqst, @@ -1538,7 +1629,13 @@ def run_xpcs_xsvs_single(uid, run_pargs, md_cor=None, return_res=False, reverse= # roi_mask[badpixel] = 0 qr = np.array([qval_dict[k][0] for k in list(qval_dict.keys())]) show_ROI_on_image( - avg_img, roi_mask, label_on=True, 
alpha=0.5, save=True, path=data_dir, uid=uidstr + avg_img, + roi_mask, + label_on=True, + alpha=0.5, + save=True, + path=data_dir, + uid=uidstr, ) # , vmin=1, vmax=15) elif scat_geometry == "gi_saxs": @@ -1553,9 +1650,28 @@ def run_xpcs_xsvs_single(uid, run_pargs, md_cor=None, return_res=False, reverse= ) ticks_ = get_qzr_map(qr_map, qz_map, inc_x0, Nzline=10, Nrline=10) ticks = ticks_[:4] - plot_qzr_map(qr_map, qz_map, inc_x0, ticks=ticks_, data=avg_img, uid=uidstr, path=data_dir) - show_qzr_roi(avg_img, roi_mask, inc_x0, ticks, alpha=0.5, save=True, path=data_dir, uid=uidstr) - qr_1d_pds = cal_1d_qr(avg_img, Qr, Qz, qr_map, qz_map, inc_x0, setup_pargs=setup_pargs) + plot_qzr_map( + qr_map, + qz_map, + inc_x0, + ticks=ticks_, + data=avg_img, + uid=uidstr, + path=data_dir, + ) + show_qzr_roi( + avg_img, + roi_mask, + inc_x0, + ticks, + alpha=0.5, + save=True, + path=data_dir, + uid=uidstr, + ) + qr_1d_pds = cal_1d_qr( + avg_img, Qr, Qz, qr_map, qz_map, inc_x0, setup_pargs=setup_pargs + ) plot_qr_1d_with_ROI( qr_1d_pds, qr_center=np.unique(np.array(list(qval_dict.values()))[:, 0]), @@ -1566,9 +1682,13 @@ def run_xpcs_xsvs_single(uid, run_pargs, md_cor=None, return_res=False, reverse= ) Nimg = FD.end - FD.beg - time_edge = create_time_slice(N=Nimg, slice_num=3, slice_width=1, edges=None) + time_edge = create_time_slice( + N=Nimg, slice_num=3, slice_width=1, edges=None + ) time_edge = np.array(time_edge) + good_start - qrt_pds = get_t_qrc(FD, time_edge, Qr, Qz, qr_map, qz_map, path=data_dir, uid=uidstr) + qrt_pds = get_t_qrc( + FD, time_edge, Qr, Qz, qr_map, qz_map, path=data_dir, uid=uidstr + ) plot_qrt_pds(qrt_pds, time_edge, qz_index=0, uid=uidstr, path=data_dir) ############################## @@ -1576,11 +1696,27 @@ def run_xpcs_xsvs_single(uid, run_pargs, md_cor=None, return_res=False, reverse= ######################################## if scat_geometry != "ang_saxs": roi_inten = check_ROI_intensity( - avg_img, roi_mask, ring_number=qth_interest, uid=uidstr, 
save=True, path=data_dir + avg_img, + roi_mask, + ring_number=qth_interest, + uid=uidstr, + save=True, + path=data_dir, ) - if scat_geometry == "saxs" or scat_geometry == "gi_saxs" or scat_geometry == "gi_waxs": + if ( + scat_geometry == "saxs" + or scat_geometry == "gi_saxs" + or scat_geometry == "gi_waxs" + ): if run_waterfall: - wat = cal_waterfallc(FD, roi_mask, qindex=qth_interest, save=True, path=data_dir, uid=uidstr) + wat = cal_waterfallc( + FD, + roi_mask, + qindex=qth_interest, + save=True, + path=data_dir, + uid=uidstr, + ) if run_waterfall: plot_waterfallc( wat, @@ -1598,7 +1734,9 @@ def run_xpcs_xsvs_single(uid, run_pargs, md_cor=None, return_res=False, reverse= times_roi, mean_int_sets = cal_each_ring_mean_intensityc( FD, roi_mask, timeperframe=None, multi_cor=True ) - plot_each_ring_mean_intensityc(times_roi, mean_int_sets, uid=uidstr, save=True, path=data_dir) + plot_each_ring_mean_intensityc( + times_roi, mean_int_sets, uid=uidstr, save=True, path=data_dir + ) roi_avg = np.average(mean_int_sets, axis=0) uid_ = uidstr + "_fra_%s_%s" % (FD.beg, FD.end) @@ -1618,7 +1756,12 @@ def run_xpcs_xsvs_single(uid, run_pargs, md_cor=None, return_res=False, reverse= if "g2_fit_variables" in list(run_pargs.keys()): g2_fit_variables = run_pargs["g2_fit_variables"] else: - g2_fit_variables = {"baseline": True, "beta": True, "alpha": False, "relaxation_rate": True} + g2_fit_variables = { + "baseline": True, + "beta": True, + "alpha": False, + "relaxation_rate": True, + } if "g2_guess_values" in list(run_pargs.keys()): g2_guess_values = run_pargs["g2_guess_values"] @@ -1633,7 +1776,12 @@ def run_xpcs_xsvs_single(uid, run_pargs, md_cor=None, return_res=False, reverse= if "g2_guess_limits" in list(run_pargs.keys()): g2_guess_limits = run_pargs["g2_guess_limits"] else: - g2_guess_limits = dict(baseline=[1, 2], alpha=[0, 2], beta=[0, 1], relaxation_rate=[0.001, 5000]) + g2_guess_limits = dict( + baseline=[1, 2], + alpha=[0, 2], + beta=[0, 1], + relaxation_rate=[0.001, 
5000], + ) if run_one_time: if use_imgsum_norm: @@ -1643,7 +1791,14 @@ def run_xpcs_xsvs_single(uid, run_pargs, md_cor=None, return_res=False, reverse= if scat_geometry != "ang_saxs": t0 = time.time() g2, lag_steps = cal_g2p( - FD, roi_mask, bad_frame_list, good_start, num_buf=8, num_lev=None, imgsum=imgsum_, norm=norm + FD, + roi_mask, + bad_frame_list, + good_start, + num_buf=8, + num_lev=None, + imgsum=imgsum_, + norm=norm, ) run_time(t0) taus = lag_steps * timeperframe @@ -1703,10 +1858,24 @@ def run_xpcs_xsvs_single(uid, run_pargs, md_cor=None, return_res=False, reverse= else: t0 = time.time() g2_v, lag_steps_v = cal_g2p( - FD, roi_mask_v, bad_frame_list, good_start, num_buf=8, num_lev=None, imgsum=imgsum_, norm=norm + FD, + roi_mask_v, + bad_frame_list, + good_start, + num_buf=8, + num_lev=None, + imgsum=imgsum_, + norm=norm, ) g2_p, lag_steps_p = cal_g2p( - FD, roi_mask_p, bad_frame_list, good_start, num_buf=8, num_lev=None, imgsum=imgsum_, norm=norm + FD, + roi_mask_p, + bad_frame_list, + good_start, + num_buf=8, + num_lev=None, + imgsum=imgsum_, + norm=norm, ) run_time(t0) @@ -1737,7 +1906,12 @@ def run_xpcs_xsvs_single(uid, run_pargs, md_cor=None, return_res=False, reverse= function=fit_g2_func_v, vlim=[0.95, 1.05], fit_range=None, - fit_variables={"baseline": True, "beta": True, "alpha": False, "relaxation_rate": True}, + fit_variables={ + "baseline": True, + "beta": True, + "alpha": False, + "relaxation_rate": True, + }, guess_values={ "baseline": 1.0, "beta": 0.05, @@ -1746,7 +1920,9 @@ def run_xpcs_xsvs_single(uid, run_pargs, md_cor=None, return_res=False, reverse= }, ) g2_fit_paras_v = save_g2_fit_para_tocsv( - g2_fit_result_v, filename=uid_ + "_g2_fit_paras_v.csv", path=data_dir + g2_fit_result_v, + filename=uid_ + "_g2_fit_paras_v.csv", + path=data_dir, ) fit_g2_func_p = "flow_para" # for parallel @@ -1772,7 +1948,9 @@ def run_xpcs_xsvs_single(uid, run_pargs, md_cor=None, return_res=False, reverse= }, ) g2_fit_paras_p = save_g2_fit_para_tocsv( - 
g2_fit_result_p, filename=uid_ + "_g2_fit_paras_p.csv", path=data_dir + g2_fit_result_p, + filename=uid_ + "_g2_fit_paras_p.csv", + path=data_dir, ) plot_g2_general( @@ -1804,13 +1982,18 @@ def run_xpcs_xsvs_single(uid, run_pargs, md_cor=None, return_res=False, reverse= ) combine_images( - [data_dir + uid_ + "_g2_v_fit.png", data_dir + uid_ + "_g2_p_fit.png"], + [ + data_dir + uid_ + "_g2_v_fit.png", + data_dir + uid_ + "_g2_p_fit.png", + ], data_dir + uid_ + "_g2_fit.png", outsize=(2000, 2400), ) D0_v, qrate_fit_res_v = get_q_rate_fit_general( - qval_dict_v, g2_fit_paras_v["relaxation_rate"], geometry=scat_geometry + qval_dict_v, + g2_fit_paras_v["relaxation_rate"], + geometry=scat_geometry, ) plot_q_rate_fit_general( qval_dict_v, @@ -1822,7 +2005,9 @@ def run_xpcs_xsvs_single(uid, run_pargs, md_cor=None, return_res=False, reverse= ) D0_p, qrate_fit_res_p = get_q_rate_fit_general( - qval_dict_p, g2_fit_paras_p["relaxation_rate"], geometry=scat_geometry + qval_dict_p, + g2_fit_paras_p["relaxation_rate"], + geometry=scat_geometry, ) plot_q_rate_fit_general( qval_dict_p, @@ -1834,7 +2019,10 @@ def run_xpcs_xsvs_single(uid, run_pargs, md_cor=None, return_res=False, reverse= ) combine_images( - [data_dir + uid_ + "_vert_Q_Rate_fit.png", data_dir + uid_ + "_para_Q_Rate_fit.png"], + [ + data_dir + uid_ + "_vert_Q_Rate_fit.png", + data_dir + uid_ + "_para_Q_Rate_fit.png", + ], data_dir + uid_ + "_Q_Rate_fit.png", outsize=(2000, 2400), ) @@ -1842,7 +2030,6 @@ def run_xpcs_xsvs_single(uid, run_pargs, md_cor=None, return_res=False, reverse= # For two-time data_pixel = None if run_two_time: - data_pixel = Get_Pixel_Arrayc(FD, pixelist, norm=norm).get_data() t0 = time.time() g12b = auto_two_Arrayc(data_pixel, roi_mask, index=None) @@ -1911,7 +2098,10 @@ def run_xpcs_xsvs_single(uid, run_pargs, md_cor=None, return_res=False, reverse= ) D0b, qrate_fit_resb = get_q_rate_fit_general( - qval_dict, g2b_fit_paras["relaxation_rate"], fit_range=None, geometry=scat_geometry + qval_dict, 
+ g2b_fit_paras["relaxation_rate"], + fit_range=None, + geometry=scat_geometry, ) # print( qval_dict, g2b_fit_paras['relaxation_rate'], qrate_fit_resb ) @@ -1981,8 +2171,13 @@ def run_xpcs_xsvs_single(uid, run_pargs, md_cor=None, return_res=False, reverse= ) if run_dose: - get_two_time_mulit_uids( - [uid], roi_mask, norm=norm, bin_frame_number=bin_frame_number, path=data_dir0, force_generate=False + get_two_time_multi_uids( + [uid], + roi_mask, + norm=norm, + bin_frame_number=bin_frame_number, + path=data_dir0, + force_generate=False, ) N = len(imgs) try: @@ -1995,7 +2190,7 @@ def run_xpcs_xsvs_single(uid, run_pargs, md_cor=None, return_res=False, reverse= dose_frame = np.int_([N / 8, N / 4, N / 2, 3 * N / 4, N * 0.99]) # N/32, N/16, N/8, N/4 ,N/2, 3*N/4, N*0.99 exposure_dose = tr * exposuretime * dose_frame - taus_uids, g2_uids = get_series_one_time_mulit_uids( + taus_uids, g2_uids = get_series_one_time_multi_uids( [uid], qval_dict, good_start=good_start, @@ -2023,7 +2218,7 @@ def run_xpcs_xsvs_single(uid, run_pargs, md_cor=None, return_res=False, reverse= append_name="", ) - # Speckel Visiblity + # Speckel Visibility if run_xsvs: max_cts = get_max_countc(FD, roi_mask) qind, pixelist = roi.extract_label_indices(roi_mask) @@ -2032,7 +2227,9 @@ def run_xpcs_xsvs_single(uid, run_pargs, md_cor=None, return_res=False, reverse= # time_steps = np.array( utils.geometric_series(2, len(imgs) ) ) time_steps = [0, 1] # only run the first two levels num_times = len(time_steps) - times_xsvs = exposuretime + (2 ** (np.arange(len(time_steps))) - 1) * timeperframe + times_xsvs = ( + exposuretime + (2 ** (np.arange(len(time_steps))) - 1) * timeperframe + ) print("The max counts are: %s" % max_cts) ### Do historam @@ -2067,7 +2264,11 @@ def run_xpcs_xsvs_single(uid, run_pargs, md_cor=None, return_res=False, reverse= ) run_time(t0) spec_pds = save_bin_his_std( - spec_bins, spec_his, spec_std, filename=uid_ + "_spec_res.csv", path=data_dir + spec_bins, + spec_his, + spec_std, + 
filename=uid_ + "_spec_res.csv", + path=data_dir, ) ML_val, KL_val, K_ = get_xsvs_fit( @@ -2125,7 +2326,13 @@ def run_xpcs_xsvs_single(uid, run_pargs, md_cor=None, return_res=False, reverse= ### Get contrast contrast_factorL = get_contrast(ML_val) spec_km_pds = save_KM( - spec_kmean, KL_val, ML_val, qs=qr, level_time=times_xsvs, uid=uid_, path=data_dir + spec_kmean, + KL_val, + ML_val, + qs=qr, + level_time=times_xsvs, + uid=uid_, + path=data_dir, ) # print( spec_km_pds ) @@ -2206,7 +2413,17 @@ def run_xpcs_xsvs_single(uid, run_pargs, md_cor=None, return_res=False, reverse= "bad_frame_list", "qr_1d_pds", ], - [md, roi_mask, qval_dict, avg_img, mask, pixel_mask, imgsum, bad_frame_list, qr_1d_pds], + [ + md, + roi_mask, + qval_dict, + avg_img, + mask, + pixel_mask, + imgsum, + bad_frame_list, + qr_1d_pds, + ], ): Exdt[k] = v elif scat_geometry == "saxs": @@ -2243,8 +2460,26 @@ def run_xpcs_xsvs_single(uid, run_pargs, md_cor=None, return_res=False, reverse= Exdt[k] = v elif scat_geometry == "gi_waxs": for k, v in zip( - ["md", "roi_mask", "qval_dict", "avg_img", "mask", "pixel_mask", "imgsum", "bad_frame_list"], - [md, roi_mask, qval_dict, avg_img, mask, pixel_mask, imgsum, bad_frame_list], + [ + "md", + "roi_mask", + "qval_dict", + "avg_img", + "mask", + "pixel_mask", + "imgsum", + "bad_frame_list", + ], + [ + md, + roi_mask, + qval_dict, + avg_img, + mask, + pixel_mask, + imgsum, + bad_frame_list, + ], ): Exdt[k] = v elif scat_geometry == "ang_saxs": @@ -2287,33 +2522,53 @@ def run_xpcs_xsvs_single(uid, run_pargs, md_cor=None, return_res=False, reverse= Exdt["mean_int_sets"] = mean_int_sets if run_one_time: if scat_geometry != "ang_saxs": - for k, v in zip(["taus", "g2", "g2_fit_paras"], [taus, g2, g2_fit_paras]): + for k, v in zip( + ["taus", "g2", "g2_fit_paras"], [taus, g2, g2_fit_paras] + ): Exdt[k] = v else: - for k, v in zip(["taus_v", "g2_v", "g2_fit_paras_v"], [taus_v, g2_v, g2_fit_paras_v]): + for k, v in zip( + ["taus_v", "g2_v", "g2_fit_paras_v"], 
[taus_v, g2_v, g2_fit_paras_v] + ): Exdt[k] = v - for k, v in zip(["taus_p", "g2_p", "g2_fit_paras_p"], [taus_p, g2_p, g2_fit_paras_p]): + for k, v in zip( + ["taus_p", "g2_p", "g2_fit_paras_p"], [taus_p, g2_p, g2_fit_paras_p] + ): Exdt[k] = v if run_two_time: - for k, v in zip(["tausb", "g2b", "g2b_fit_paras", "g12b"], [tausb, g2b, g2b_fit_paras, g12b]): + for k, v in zip( + ["tausb", "g2b", "g2b_fit_paras", "g12b"], + [tausb, g2b, g2b_fit_paras, g12b], + ): Exdt[k] = v if run_four_time: for k, v in zip(["taus4", "g4"], [taus4, g4]): Exdt[k] = v if run_xsvs: for k, v in zip( - ["spec_kmean", "spec_pds", "times_xsvs", "spec_km_pds", "contrast_factorL"], + [ + "spec_kmean", + "spec_pds", + "times_xsvs", + "spec_km_pds", + "contrast_factorL", + ], [spec_kmean, spec_pds, times_xsvs, spec_km_pds, contrast_factorL], ): Exdt[k] = v - export_xpcs_results_to_h5("uid=%s_Res.h5" % md["uid"], data_dir, export_dict=Exdt) + export_xpcs_results_to_h5( + "uid=%s_Res.h5" % md["uid"], data_dir, export_dict=Exdt + ) # extract_dict = extract_xpcs_results_from_h5( filename = 'uid=%s_Res.h5'%md['uid'], import_dir = data_dir ) # Creat PDF Report pdf_out_dir = os.path.join("/XF11ID/analysis/", CYCLE, username, "Results/") pdf_filename = "XPCS_Analysis_Report_for_uid=%s%s.pdf" % (uid, pdf_version) if run_xsvs: - pdf_filename = "XPCS_XSVS_Analysis_Report_for_uid=%s%s.pdf" % (uid, pdf_version) + pdf_filename = "XPCS_XSVS_Analysis_Report_for_uid=%s%s.pdf" % ( + uid, + pdf_version, + ) # pdf_filename print(data_dir, uid[:6], pdf_out_dir, pdf_filename, username) @@ -2339,7 +2594,9 @@ def run_xpcs_xsvs_single(uid, run_pargs, md_cor=None, return_res=False, reverse= pname = pdf_out_dir + pdf_filename atch = [Attachment(open(pname, "rb"))] try: - update_olog_uid(uid=md["uid"], text="Add XPCS Analysis PDF Report", attachments=atch) + update_olog_uid( + uid=md["uid"], text="Add XPCS Analysis PDF Report", attachments=atch + ) except: print( "I can't attach this PDF: %s due to a duplicated filename. 
Please give a different PDF file." @@ -2369,7 +2626,19 @@ def run_xpcs_xsvs_single(uid, run_pargs, md_cor=None, return_res=False, reverse= "roi_mask", "qval_dict", ], - [md, q_saxs, iq_saxs, iqst, qt, avg_img, mask, imgsum, bad_frame_list, roi_mask, qval_dict], + [ + md, + q_saxs, + iq_saxs, + iqst, + qt, + avg_img, + mask, + imgsum, + bad_frame_list, + roi_mask, + qval_dict, + ], ): res[k] = v @@ -2419,14 +2688,42 @@ def run_xpcs_xsvs_single(uid, run_pargs, md_cor=None, return_res=False, reverse= "bad_frame_list", "qr_1d_pds", ], - [md, roi_mask, qval_dict, avg_img, mask, pixel_mask, imgsum, bad_frame_list, qr_1d_pds], + [ + md, + roi_mask, + qval_dict, + avg_img, + mask, + pixel_mask, + imgsum, + bad_frame_list, + qr_1d_pds, + ], ): res[k] = v elif scat_geometry == "gi_waxs": for k, v in zip( - ["md", "roi_mask", "qval_dict", "avg_img", "mask", "pixel_mask", "imgsum", "bad_frame_list"], - [md, roi_mask, qval_dict, avg_img, mask, pixel_mask, imgsum, bad_frame_list], + [ + "md", + "roi_mask", + "qval_dict", + "avg_img", + "mask", + "pixel_mask", + "imgsum", + "bad_frame_list", + ], + [ + md, + roi_mask, + qval_dict, + avg_img, + mask, + pixel_mask, + imgsum, + bad_frame_list, + ], ): res[k] = v diff --git a/pyCHX/movie_maker.py b/pyCHX/movie_maker.py index 0d42cf9..87240e4 100644 --- a/pyCHX/movie_maker.py +++ b/pyCHX/movie_maker.py @@ -23,7 +23,7 @@ def select_regoin( defined by verts e.g. 
xs,xe,ys,ye = vert #x_start, x_end, y_start,y_end (dimy, dimx,) = img.shape - Giving cut postion, start, end, width""" + Giving cut position, start, end, width""" import numpy as np xs, xe, ys, ye = vert @@ -56,10 +56,17 @@ def select_regoin( def save_png_series( - imgs, ROI=None, logs=True, outDir=None, uid=None, vmin=None, vmax=None, cmap="viridis", dpi=100 + imgs, + ROI=None, + logs=True, + outDir=None, + uid=None, + vmin=None, + vmax=None, + cmap="viridis", + dpi=100, ): import matplotlib.pyplot as plt - import numpy as np from matplotlib.colors import LogNorm """ @@ -104,10 +111,21 @@ def save_png_series( if not logs: im = ax.imshow( - i0, origin="lower", cmap=cmap, interpolation="nearest", vmin=vmin, vmax=vmax + i0, + origin="lower", + cmap=cmap, + interpolation="nearest", + vmin=vmin, + vmax=vmax, ) # vmin=0,vmax=1, else: - im = ax.imshow(i0, origin="lower", cmap=cmap, interpolation="nearest", norm=LogNorm(vmin, vmax)) + im = ax.imshow( + i0, + origin="lower", + cmap=cmap, + interpolation="nearest", + norm=LogNorm(vmin, vmax), + ) # ttl = ax.text(.75, .2, '', transform = ax.transAxes, va='center', color='white', fontsize=18) # fig.set_size_inches( [5., 5 * asp] ) # plt.tight_layout() @@ -135,7 +153,6 @@ def movie_maker( ): import matplotlib.animation as animation import matplotlib.pyplot as plt - import numpy as np from matplotlib.colors import LogNorm """ @@ -200,12 +217,22 @@ def movie_maker( # print( cmap, vmin, vmax ) if not logs: - im = ax.imshow(i0, origin="lower", cmap=cmap, interpolation="nearest", vmin=vmin, vmax=vmax) + im = ax.imshow( + i0, origin="lower", cmap=cmap, interpolation="nearest", vmin=vmin, vmax=vmax + ) else: - im = ax.imshow(i0, origin="lower", cmap=cmap, interpolation="nearest", norm=LogNorm(vmin, vmax)) + im = ax.imshow( + i0, + origin="lower", + cmap=cmap, + interpolation="nearest", + norm=LogNorm(vmin, vmax), + ) # ttl = ax.text(.75, .2, '', transform = ax.transAxes, va='center', color='white', fontsize=18) - ttl = ax.text(0.75, 
0.2, "", transform=ax.transAxes, va="center", color="black", fontsize=18) + ttl = ax.text( + 0.75, 0.2, "", transform=ax.transAxes, va="center", color="black", fontsize=18 + ) # print asp # fig.set_size_inches( [5., 5 * asp] ) diff --git a/pyCHX/v2/_commonspeckle/DEVs.py b/pyCHX/v2/_commonspeckle/DEVs.py index 19fd4e5..35aa061 100644 --- a/pyCHX/v2/_commonspeckle/DEVs.py +++ b/pyCHX/v2/_commonspeckle/DEVs.py @@ -1,7 +1,6 @@ # simple brute force multitau # from pyCHX.chx_generic_functions import average_array_withNan import numpy as np -import skbeam.core.roi as roi from numpy.fft import fft, ifft from tqdm import tqdm @@ -18,7 +17,7 @@ def fit_one_peak_curve(x, y, fit_range): fwhm: float, full width at half max intensity of the peak, 2*sigma fwhm_std:float, error bar of the full width at half max intensity of the peak xf: the x in the fit - out: the fitting class resutled from lmfit + out: the fitting class resulted from lmfit """ from lmfit.models import LinearModel, LorentzianModel @@ -164,7 +163,7 @@ def get_oneQ_g2_fft(time_inten_oneQ, axis=0): Input: time_inten_oneQ: 2d-array, shape=[time, pixel number in the ROI], a time dependent intensity for a list of pixels - ( the equivilent pixels belongs to one Q ) + ( the equivalent pixels belongs to one Q ) Return: G/(P*F) """ @@ -202,7 +201,7 @@ def get_g2_PF(time_inten): def auto_correlation_fft_padding_zeros(a, axis=-1): - """Y.G. Dev@CHX, 2018/10/15 Do autocorelation of ND array by fft + """Y.G. 
Dev@CHX, 2018/10/15 Do autocorelation of AND array by fft Math: Based on auto_cor(arr) = ifft( fft( arr ) * fft(arr[::-1]) ) In numpy form @@ -228,7 +227,8 @@ def auto_correlation_fft_padding_zeros(a, axis=-1): # print(M, N, 2*N-1) cor = np.real( ifft( - fft(a, n=N * 2 - 1, axis=axis) * np.conjugate(fft(a, n=N * 2 - 1, axis=axis)), + fft(a, n=N * 2 - 1, axis=axis) + * np.conjugate(fft(a, n=N * 2 - 1, axis=axis)), n=N * 2 - 1, axis=axis, ) @@ -246,7 +246,7 @@ def auto_correlation_fft_padding_zeros(a, axis=-1): def auto_correlation_fft(a, axis=-1): - """Y.G. Dev@CHX, 2018/10/15 Do autocorelation of ND array by fft + """Y.G. Dev@CHX, 2018/10/15 Do autocorelation of AND array by fft Math: Based on auto_cor(arr) = ifft( fft( arr ) * fft(arr[::-1]) ) In numpy form @@ -307,7 +307,7 @@ def multitau(Ipix, bind, lvl=12, nobuf=8): / noperbin ) G2[j, :] = np.bincount(bind, np.mean(dII[j:, :] * dII[:-j, :], axis=0)) / t - for l in tqdm(np.arange(1, lvl), desc="Calcuate g2..."): + for l in tqdm(np.arange(1, lvl), desc="Calculate g2..."): nn = dII.shape[0] // 2 * 2 # make it even dII = (dII[0:nn:2, :] + dII[1:nn:2, :]) / 2.0 # sum in pairs nn = nn // 2 @@ -321,7 +321,9 @@ def multitau(Ipix, bind, lvl=12, nobuf=8): * np.bincount(bind, np.mean(dII[:-j, :], axis=0)) / noperbin ) - G2[ind, :] = np.bincount(bind, np.mean(dII[j:, :] * dII[:-j, :], axis=0)) / t + G2[ind, :] = ( + np.bincount(bind, np.mean(dII[j:, :] * dII[:-j, :], axis=0)) / t + ) # print(ind) # print(time.time()-t0) return (tt[: ind + 1], G2[: ind + 1, :]) @@ -329,10 +331,10 @@ def multitau(Ipix, bind, lvl=12, nobuf=8): def average_array_withNan(array, axis=0, mask=None): """YG. 
Jan 23, 2018 - Average array invovling np.nan along axis + Average array involving np.nan along axis Input: - array: ND array, actually should be oneD or twoD at this stage..TODOLIST for ND + array: AND array, actually should be oneD or twoD at this stage..TODOLIST for AND axis: the average axis mask: bool, same shape as array, if None, will mask all the nan values Output: @@ -353,7 +355,9 @@ def average_array_withNan(array, axis=0, mask=None): return sums / cts -def autocor_for_pix_time(pix_time_data, dly_dict, pixel_norm=None, frame_norm=None, multi_tau_method=True): +def autocor_for_pix_time( + pix_time_data, dly_dict, pixel_norm=None, frame_norm=None, multi_tau_method=True +): """YG Feb 20, 2018@CHX Do correlation for pixel_time type data with tau as defined as dly Input: @@ -373,7 +377,7 @@ def autocor_for_pix_time(pix_time_data, dly_dict, pixel_norm=None, frame_norm=No Gp = np.zeros([Ntau, Np]) Gf = np.zeros([Ntau, Np]) # mask_pix = np.isnan(pix_time_data) - # for tau_ind, tau in tqdm( enumerate(dly), desc= 'Calcuate g2...' ): + # for tau_ind, tau in tqdm( enumerate(dly), desc= 'Calculate g2...' ): tau_ind = 0 # if multi_tau_method: pix_time_datac = pix_time_data.copy() @@ -383,14 +387,18 @@ def autocor_for_pix_time(pix_time_data, dly_dict, pixel_norm=None, frame_norm=No if frame_norm is not None: pix_time_datac /= frame_norm - for tau_lev, tau_key in tqdm(enumerate(list(dly_dict.keys())), desc="Calcuate g2..."): + for tau_lev, tau_key in tqdm( + enumerate(list(dly_dict.keys())), desc="Calculate g2..." 
+ ): # print(tau_key) taus = dly_dict[tau_key] if multi_tau_method: if tau_lev > 0: nobuf = len(dly_dict[1]) nn = pix_time_datac.shape[0] // 2 * 2 # make it even - pix_time_datac = (pix_time_datac[0:nn:2, :] + pix_time_datac[1:nn:2, :]) / 2.0 # sum in pairs + pix_time_datac = ( + pix_time_datac[0:nn:2, :] + pix_time_datac[1:nn:2, :] + ) / 2.0 # sum in pairs nn = nn // 2 if nn < nobuf: break @@ -450,7 +458,6 @@ def autocor_xytframe(self, n): ###################For Fit import matplotlib.pyplot as plt -import numpy as np from scipy.optimize import leastsq # duplicate my curfit function from yorick, except use sigma and not w @@ -465,7 +472,9 @@ def curfit(x, y, a, sigy=None, function_name=None, adj=None): function_name = funct # print( a, adj, a[adj] ) # print(x,y,a) - afit, cv, idt, m, ie = leastsq(_residuals, a[adj], args=(x, y, sigy, a, adj, function_name), full_output=True) + afit, cv, idt, m, ie = leastsq( + _residuals, a[adj], args=(x, y, sigy, a, adj, function_name), full_output=True + ) a[adj] = afit realcv = np.identity(afit.size) realcv[np.ix_(adj, adj)] = cv @@ -497,12 +506,15 @@ def fitpr(chisq, a, sigmaa, title=None, lbl=None): lbl = [] for i in xrange(a.size): lbl.append("A%(#)02d" % {"#": i}) - # print resuls of a fit. + # print results of a fit. 
if title != None: print(title) print(" chisq=%(c).4f" % {"c": chisq}) for i in range(a.size): - print(" %(lbl)8s =%(m)10.4f +/- %(s).4f" % {"lbl": lbl[i], "m": a[i], "s": sigmaa[i]}) + print( + " %(lbl)8s =%(m)10.4f +/- %(s).4f" + % {"lbl": lbl[i], "m": a[i], "s": sigmaa[i]} + ) # easy plot for fit @@ -525,7 +537,9 @@ def Gaussian(x, p): """ xo, amplitude, sigma, offset = p - g = offset + amplitude * 1.0 / (sigma * np.sqrt(2 * np.pi)) * np.exp(-1 / 2.0 * (x - xo) ** 2 / sigma**2) + g = offset + amplitude * 1.0 / (sigma * np.sqrt(2 * np.pi)) * np.exp( + -1 / 2.0 * (x - xo) ** 2 / sigma**2 + ) return g @@ -564,7 +578,8 @@ def gen_elps_sectors(a, b, r_min, r_n, th_n, c_x, c_y, th_min=0, th_max=360): th_list = np.linspace(th_min, th_max, th_n + 1) r_list = np.linspace(r_min, 1, r_n + 1) regions_list = [ - [[np.array([], dtype=np.int_), np.array([], dtype=np.int_)] for _ in range(r_n)] for _ in range(th_n) + [[np.array([], dtype=np.int_), np.array([], dtype=np.int_)] for _ in range(r_n)] + for _ in range(th_n) ] w = int(np.ceil(a * 2)) h = int(np.ceil(b * 2)) @@ -578,12 +593,18 @@ def gen_elps_sectors(a, b, r_min, r_n, th_n, c_x, c_y, th_min=0, th_max=360): cur_r = np.sqrt(cur_x**2 + cur_y**2) cur_elps_r = elps_r(a, b, cur_theta) cur_r_list = r_list * cur_elps_r - cur_theta = np.rad2deg(cur_theta) # Convert to degrees to compare with th_list + cur_theta = np.rad2deg( + cur_theta + ) # Convert to degrees to compare with th_list r_ind = place_in_interval(cur_r, cur_r_list) th_ind = place_in_interval(cur_theta, th_list) if (r_ind != -1) and (th_ind != -1): - regions_list[th_ind][r_ind][0] = np.append(regions_list[th_ind][r_ind][0], ii + x_offset) - regions_list[th_ind][r_ind][1] = np.append(regions_list[th_ind][r_ind][1], jj + y_offset) + regions_list[th_ind][r_ind][0] = np.append( + regions_list[th_ind][r_ind][0], ii + x_offset + ) + regions_list[th_ind][r_ind][1] = np.append( + regions_list[th_ind][r_ind][1], jj + y_offset + ) sectors = [] for th_reg_list in 
regions_list: for sector in th_reg_list: diff --git a/pyCHX/v2/_commonspeckle/DataGonio.py b/pyCHX/v2/_commonspeckle/DataGonio.py index 686e7f0..479258c 100644 --- a/pyCHX/v2/_commonspeckle/DataGonio.py +++ b/pyCHX/v2/_commonspeckle/DataGonio.py @@ -1,21 +1,19 @@ # import sys -import os -import re # Regular expressions -import sys -import matplotlib as mpl import numpy as np # from scipy.optimize import leastsq # import scipy.special import PIL # Python Image Library (for opening PNG, etc.) -import pylab as plt -import skbeam.core.correlation as corr -import skbeam.core.roi as roi import skbeam.core.utils as utils -from skbeam.core.accumulators.binned_statistic import BinnedStatistic1D, BinnedStatistic2D +from skbeam.core.accumulators.binned_statistic import ( + BinnedStatistic1D, + BinnedStatistic2D, +) -from pyCHX.v2._commonspeckle.chx_generic_functions import average_array_withNan # common +from pyCHX.v2._commonspeckle.chx_generic_functions import ( + average_array_withNan, +) # common def convert_Qmap( @@ -151,7 +149,9 @@ def get_QPhiMap(img_shape, center): return q_map, phi_map -def get_img_qphimap(img, q_map, phi_map, mask, bins, center, qang_range=None, statistic="mean"): +def get_img_qphimap( + img, q_map, phi_map, mask, bins, center, qang_range=None, statistic="mean" +): """Y.G., Dev Nov 10, 2018 Get phi_map by giving image e.g., q_map, phi_map = get_QPhiMap( mask.shape, center[::-1]) @@ -536,7 +536,9 @@ def _generate_qxyz_maps(self): alpha_f = np.arctan2(Y * c * np.cos(theta_f), 1) # radians self.qx_map_data = self.get_k() * np.sin(theta_f) * np.cos(alpha_f) - self.qy_map_data = self.get_k() * (np.cos(theta_f) * np.cos(alpha_f) - 1) # TODO: Check sign + self.qy_map_data = self.get_k() * ( + np.cos(theta_f) * np.cos(alpha_f) - 1 + ) # TODO: Check sign self.qz_map_data = -1.0 * self.get_k() * np.sin(alpha_f) self.qr_map_data = np.sign(self.qx_map_data) * np.sqrt( @@ -551,7 +553,7 @@ def _generate_qxyz_maps(self): 
################################################################################ class CalibrationGonio(Calibration): """ - The geometric claculations used here are described: + The geometric calculations used here are described: http://gisaxs.com/index.php/Geometry:WAXS_3D """ @@ -596,7 +598,7 @@ def set_angles( self.sam_chi = sam_chi self.sam_theta = sam_theta - def rotation_matix(self, sam_phi, sam_theta, sam_chi, degrees=True): + def rotation_matrix(self, sam_phi, sam_theta, sam_chi, degrees=True): """ sam_phi, rotate along lab-frame x, CHX phi sam_chi, rotate along lab-frame z, CHX chi @@ -636,11 +638,13 @@ def rotation_matix(self, sam_phi, sam_theta, sam_chi, degrees=True): Rxy = np.dot(Rx, Ry) return np.dot(Rxy, Rz) - def _generate_qxyz_map_SF_from_Lab(self, qx, qy, qz, sam_phi, sam_theta, sam_chi, degrees=True): + def _generate_qxyz_map_SF_from_Lab( + self, qx, qy, qz, sam_phi, sam_theta, sam_chi, degrees=True + ): """ Convert qmap from Lab frame to sample frame """ - self.Rot = self.rotation_matix(sam_phi, sam_theta, sam_chi, degrees=degrees) + self.Rot = self.rotation_matrix(sam_phi, sam_theta, sam_chi, degrees=degrees) qsx, qsy, qsz = np.dot(self.Rot, [np.ravel(qx), np.ravel(qy), np.ravel(qz)]) return qsx.reshape(qx.shape), qsy.reshape(qy.shape), qsz.reshape(qz.shape) @@ -662,10 +666,14 @@ def _generate_qxyz_maps_samFrame(self, degrees=True): self.sam_chi, degrees=degrees, ) - self.qr_map_lab_data = np.sqrt(np.square(self.qx_map_lab_data) + np.square(self.qy_map_lab_data)) + self.qr_map_lab_data = np.sqrt( + np.square(self.qx_map_lab_data) + np.square(self.qy_map_lab_data) + ) self.q_map_lab_data = np.sqrt( - np.square(self.qx_map_lab_data) + np.square(self.qy_map_lab_data) + np.square(self.qz_map_lab_data) + np.square(self.qx_map_lab_data) + + np.square(self.qy_map_lab_data) + + np.square(self.qz_map_lab_data) ) def get_ratioDw(self): @@ -687,9 +695,9 @@ def angle_map(self): return self.angle_map_data - def _generate_qxyz_maps_no_offest(self): + def 
_generate_qxyz_maps_no_offset(self): """ - The geometric claculations used here are described: + The geometric calculations used here are described: http://gisaxs.com/index.php/Geometry:WAXS_3D """ @@ -708,10 +716,13 @@ def _generate_qxyz_maps_no_offest(self): k_over_Dprime = self.get_k() / Dprime qx_c = k_over_Dprime * ( - X_c * np.cos(phi_g) - np.sin(phi_g) * (d * np.cos(theta_g) - Y_c * np.sin(theta_g)) + X_c * np.cos(phi_g) + - np.sin(phi_g) * (d * np.cos(theta_g) - Y_c * np.sin(theta_g)) ) qy_c = k_over_Dprime * ( - X_c * np.sin(phi_g) + np.cos(phi_g) * (d * np.cos(theta_g) - Y_c * np.sin(theta_g)) - Dprime + X_c * np.sin(phi_g) + + np.cos(phi_g) * (d * np.cos(theta_g) - Y_c * np.sin(theta_g)) + - Dprime ) qz_c = -1 * k_over_Dprime * (d * np.sin(theta_g) + Y_c * np.cos(theta_g)) @@ -733,7 +744,9 @@ def _generate_qxyz_maps_no_offest(self): alpha_f = np.arctan2(Y * c * np.cos(theta_f), 1) # radians self.qx_map_data = self.get_k() * np.sin(theta_f) * np.cos(alpha_f) - self.qy_map_data = self.get_k() * (np.cos(theta_f) * np.cos(alpha_f) - 1) # TODO: Check sign + self.qy_map_data = self.get_k() * ( + np.cos(theta_f) * np.cos(alpha_f) - 1 + ) # TODO: Check sign self.qz_map_data = -1.0 * self.get_k() * np.sin(alpha_f) self.qr_map_data = np.sign(self.qx_map_data) * np.sqrt( @@ -747,7 +760,7 @@ def _generate_qxyz_maps_no_offest(self): def _generate_qxyz_maps(self): """ - The geometric claculations used here are described: + The geometric calculations used here are described: http://gisaxs.com/index.php/Geometry:WAXS_3D YG add offset corrections at Sep 21, 2017 @@ -791,8 +804,14 @@ def _generate_qxyz_maps(self): k_over_Dprime = self.get_k() / Dprime qx_c = k_over_Dprime * (X_c * np.cos(phi_g) - np.sin(phi_g) * yprime + offset_x) - qy_c = k_over_Dprime * (X_c * np.sin(phi_g) + np.cos(phi_g) * yprime + offset_y - Dprime) - qz_c = -1 * k_over_Dprime * (dprime * np.sin(theta_g) + Y_c * np.cos(theta_g) + offset_z) + qy_c = k_over_Dprime * ( + X_c * np.sin(phi_g) + 
np.cos(phi_g) * yprime + offset_y - Dprime + ) + qz_c = ( + -1 + * k_over_Dprime + * (dprime * np.sin(theta_g) + Y_c * np.cos(theta_g) + offset_z) + ) qr_c = np.sqrt(np.square(qx_c) + np.square(qy_c)) q_c = np.sqrt(np.square(qx_c) + np.square(qy_c) + np.square(qz_c)) @@ -819,7 +838,9 @@ def _generate_qxyz_maps(self): alpha_f = np.arctan2(Y * c * np.cos(theta_f), 1) # radians self.qx_map_data1 = self.get_k() * np.sin(theta_f) * np.cos(alpha_f) - self.qy_map_data1 = self.get_k() * (np.cos(theta_f) * np.cos(alpha_f) - 1) # TODO: Check sign + self.qy_map_data1 = self.get_k() * ( + np.cos(theta_f) * np.cos(alpha_f) - 1 + ) # TODO: Check sign self.qz_map_data1 = -1.0 * self.get_k() * np.sin(alpha_f) self.qr_map_data1 = np.sign(self.qx_map_data1) * np.sqrt( diff --git a/pyCHX/v2/_commonspeckle/SAXS.py b/pyCHX/v2/_commonspeckle/SAXS.py index afdfe17..cadbd7d 100644 --- a/pyCHX/v2/_commonspeckle/SAXS.py +++ b/pyCHX/v2/_commonspeckle/SAXS.py @@ -5,11 +5,15 @@ """ # import numpy as np -from lmfit import Model, Parameter, Parameters, fit_report, minimize, report_fit -from scipy.optimize import curve_fit, least_squares, leastsq -from scipy.special import gamma, gammaln +from lmfit import Model, Parameters, minimize +from scipy.optimize import leastsq +from scipy.special import gamma -from pyCHX.v2._commonspeckle.chx_generic_functions import find_index, plot1D, show_img # common +from pyCHX.v2._commonspeckle.chx_generic_functions import ( + find_index, + plot1D, + show_img, +) # common # import matplotlib as mpl # import matplotlib.pyplot as plt @@ -92,7 +96,7 @@ def poly_sphere_form_factor_intensity( radius/R: in A sigma:sqrt root of variance in percent delta_rho: Scattering Length Density(SLD) difference between solvent and the scatter, A-2 - fit_func: G: Guassian;S: Flory–Schulz distribution + fit_func: G: Gaussian;S: Flory–Schulz distribution Output: The form factor intensity of the polydispersed scatter """ @@ -104,7 +108,9 @@ def poly_sphere_form_factor_intensity( if sigma 
== 0: v = mono_sphere_form_factor_intensity(q, R, delta_rho) else: - r, rs, wt = distribution_func(radius=R, sigma=sigma, num_points=num_points, spread=spread, func=fit_func) + r, rs, wt = distribution_func( + radius=R, sigma=sigma, num_points=num_points, spread=spread, func=fit_func + ) for i, Ri in enumerate(r): # print(Ri, wt[i],delta_rho, rs) v += mono_sphere_form_factor_intensity(q, Ri, delta_rho) * wt[i] * rs @@ -124,7 +130,9 @@ def poly_sphere_form_factor_intensity_q2( The form factor intensity of the polydispersed scatter """ - return poly_sphere_form_factor_intensity(x, radius, sigma, delta_rho, fit_func) * x**2 # * scale + baseline + return ( + poly_sphere_form_factor_intensity(x, radius, sigma, delta_rho, fit_func) * x**2 + ) # * scale + baseline def find_index_old(x, x0, tolerance=None): @@ -149,7 +157,9 @@ def find_index_old(x, x0, tolerance=None): return position -def form_factor_residuals(p, iq, q, num_points=20, spread=5, fit_func="G", form_model="poly_sphere"): +def form_factor_residuals( + p, iq, q, num_points=20, spread=5, fit_func="G", form_model="poly_sphere" +): """Residuals for fit iq by spheical form factor using leastsq. p: parameters for radius, sigma, delta_rho, background @@ -209,7 +219,9 @@ def form_factor_residuals_bg( return np.sqrt(np.abs(err)) -def form_factor_residuals_lmfit(p, iq, q, num_points=20, spread=5, fit_func="G", form_model="poly_sphere"): +def form_factor_residuals_lmfit( + p, iq, q, num_points=20, spread=5, fit_func="G", form_model="poly_sphere" +): """Residuals for fit iq by spheical form factor using leastsq. 
p: parameters for radius, sigma, delta_rho, background """ @@ -234,7 +246,9 @@ def form_factor_residuals_lmfit(p, iq, q, num_points=20, spread=5, fit_func="G", return err -def form_factor_residuals_bg_lmfit(p, iq, q, num_points=20, spread=5, fit_func="G", form_model="poly_sphere"): +def form_factor_residuals_bg_lmfit( + p, iq, q, num_points=20, spread=5, fit_func="G", form_model="poly_sphere" +): """Residuals for fit iq by spheical form factor using leastsq. p: parameters for radius, sigma, delta_rho, background """ @@ -301,7 +315,7 @@ def get_form_factor_fit_lmfit( Returns ------- - fit resutls: + fit results: radius sigma an example: @@ -429,7 +443,7 @@ def get_form_factor_fit2( Returns ------- - fit resutls: + fit results: radius sigma an example: @@ -522,7 +536,9 @@ def get_form_factor_fit2( ) if (len(iq_) > len(p)) and pcov is not None: - s_sq = (fit_funcs(pfit, iq_, q_, num_points, spread, fit_func, function)).sum() / (len(iq_) - len(p)) + s_sq = ( + fit_funcs(pfit, iq_, q_, num_points, spread, fit_func, function) + ).sum() / (len(iq_) - len(p)) pcov = pcov * s_sq else: pcov = np.inf @@ -572,7 +588,7 @@ def get_form_factor_fit( Returns ------- - fit resutls: + fit results: radius sigma an example: @@ -584,7 +600,10 @@ def get_form_factor_fit( elif function == "mono_sphere": mod = Model(mono_sphere_form_factor_intensity) else: - print("The %s is not supported.The supported functions include poly_sphere and mono_sphere" % function) + print( + "The %s is not supported.The supported functions include poly_sphere and mono_sphere" + % function + ) if fit_range is not None: x1, x2 = fit_range @@ -632,7 +651,9 @@ def get_form_factor_fit( return result, q_ -def plot_form_factor_with_fit(q, iq, q_, result, fit_power=0, res_pargs=None, return_fig=False, *argv, **kwargs): +def plot_form_factor_with_fit( + q, iq, q_, result, fit_power=0, res_pargs=None, return_fig=False, *argv, **kwargs +): if res_pargs is not None: uid = res_pargs["uid"] path = res_pargs["path"] @@ 
-724,7 +745,7 @@ def fit_form_factor( Returns ------- - fit resutls: + fit results: radius sigma an example: @@ -740,7 +761,9 @@ def fit_form_factor( function=function, fit_func=fit_func, ) - plot_form_factor_with_fit(q, iq, q_, result, fit_power=0, res_pargs=res_pargs, return_fig=return_fig) + plot_form_factor_with_fit( + q, iq, q_, result, fit_power=0, res_pargs=res_pargs, return_fig=return_fig + ) return result @@ -779,7 +802,7 @@ def fit_form_factor2( Returns ------- - fit resutls: + fit results: radius sigma an example: @@ -804,7 +827,10 @@ def fit_form_factor2( elif function == "mono_sphere": mod = Model(mono_sphere_form_factor_intensity) else: - print("The %s is not supported.The supported functions include poly_sphere and mono_sphere" % function) + print( + "The %s is not supported.The supported functions include poly_sphere and mono_sphere" + % function + ) if fit_range is not None: x1, x2 = fit_range @@ -1017,7 +1043,9 @@ def show_saxs_qmap( ##Fit sphere by scipy.leastsq fit -def fit_sphere_form_factor_func(parameters, ydata, xdata, yerror=None, nonvariables=None): +def fit_sphere_form_factor_func( + parameters, ydata, xdata, yerror=None, nonvariables=None +): """##Develop by YG at July 28, 2017 @CHX This function is for fitting form factor of polyderse spherical particles by using scipy.leastsq fit diff --git a/pyCHX/v2/_commonspeckle/Stitching.py b/pyCHX/v2/_commonspeckle/Stitching.py index 8658290..b814690 100644 --- a/pyCHX/v2/_commonspeckle/Stitching.py +++ b/pyCHX/v2/_commonspeckle/Stitching.py @@ -1,6 +1,4 @@ -import os import re -import sys import matplotlib.pyplot as plt import numpy as np @@ -8,7 +6,9 @@ from scipy.signal import savgol_filter as sf from pyCHX.v2._commonspeckle.chx_generic_functions import plot1D, show_img # common -from pyCHX.v2._commonspeckle.DataGonio import convert_Qmap # common #TODO how much overlap with skbeam.core.recip +from pyCHX.v2._commonspeckle.DataGonio import ( + convert_Qmap, +) # common #TODO how much overlap 
with skbeam.core.recip def get_base_all_filenames(inDir, base_filename_cut_length=-7): @@ -19,14 +19,16 @@ def get_base_all_filenames(inDir, base_filename_cut_length=-7): base_filename_cut_length: to which length the base name is unique Output: dict: keys, base filename - vales, all realted filename + vales, all related filename """ from os import listdir from os.path import isfile, join tifs = np.array([f for f in listdir(inDir) if isfile(join(inDir, f))]) tifsc = list(tifs.copy()) - utifs = np.sort(np.unique(np.array([f[:base_filename_cut_length] for f in tifs])))[::-1] + utifs = np.sort(np.unique(np.array([f[:base_filename_cut_length] for f in tifs])))[ + ::-1 + ] files = {} for uf in utifs: files[uf] = [] @@ -82,10 +84,10 @@ def Correct_Overlap_Images_Intensities( Return: data: array, stitched image with corrected intensity dataM: dict, each value is the image with correted intensity - scale: scale for each image, the first scale=1 by defination + scale: scale for each image, the first scale=1 by definition scale_smooth: smoothed scale - Exampe: + Example: data, dataM, scale,scale_smooth = Correct_Overlap_Images_Intensities( infiles, window_length=101, polyorder=5, overlap_width=58, badpixel_width =10 ) @@ -139,7 +141,9 @@ def Correct_Overlap_Images_Intensities( mode="mirror", cval=0.0, ) - data[:, a1:a2] = d[:, b1:b2] * np.repeat(scale_smooth[i], b2 - b1, axis=0).reshape([M, b2 - b1]) + data[:, a1:a2] = d[:, b1:b2] * np.repeat( + scale_smooth[i], b2 - b1, axis=0 + ).reshape([M, b2 - b1]) dataM[i] = np.zeros_like(dataM[i - 1]) dataM[i][:, 0 : w - ow] = dataM[i - 1][:, N - w : N - ow] dataM[i][:, w - ow :] = data[:, a1:a2] @@ -171,10 +175,12 @@ def check_overlap_scaling_factor(scale, scale_smooth, i=1, filename=None, save=F fig.savefig(filename) -def stitch_WAXS_in_Qspace(dataM, phis, calibration, dx=0, dy=22, dz=0, dq=0.015, mask=None): +def stitch_WAXS_in_Qspace( + dataM, phis, calibration, dx=0, dy=22, dz=0, dq=0.015, mask=None +): """YG Octo 11, 2017 stitch 
waxs scattering images in qspace - dataM: the data (with corrected intensity), dict format (todolist, make array also avialable) - phis: for SMI, the rotation angle around z-aixs + dataM: the data (with corrected intensity), dict format (todolist, make array also available) + phis: for SMI, the rotation angle around z-axis For SMI dx= 0 #in pixel unit dy = 22 #in pixel unit @@ -224,16 +230,22 @@ def stitch_WAXS_in_Qspace(dataM, phis, calibration, dx=0, dy=22, dz=0, dq=0.015, dM = np.rot90(dataM[i].T) D = dM.ravel() phi = phis[i] - calibration.set_angles(det_phi_g=phi, det_theta_g=0.0, offset_x=dx, offset_y=dy, offset_z=dz) + calibration.set_angles( + det_phi_g=phi, det_theta_g=0.0, offset_x=dx, offset_y=dy, offset_z=dz + ) calibration.clear_maps() QZ = calibration.qz_map().ravel() # [pixel_list] QX = calibration.qx_map().ravel() # [pixel_list] bins = [num_qz, num_qx] rangeq = [[qz_min, qz_max], [qx_min, qx_max]] # Nov 7,2017 using new func to qmap - remesh_data, zbins, xbins = convert_Qmap(dM, QZ, QX, bins=bins, range=rangeq, mask=mask) + remesh_data, zbins, xbins = convert_Qmap( + dM, QZ, QX, bins=bins, range=rangeq, mask=mask + ) # Normalize by the binning - num_per_bin, zbins, xbins = convert_Qmap(np.ones_like(dM), QZ, QX, bins=bins, range=rangeq, mask=mask) + num_per_bin, zbins, xbins = convert_Qmap( + np.ones_like(dM), QZ, QX, bins=bins, range=rangeq, mask=mask + ) # remesh_data, zbins, xbins = np.histogram2d(QZ, QX, bins=bins, range=rangeq, normed=False, weights=D) # Normalize by the binning @@ -251,8 +263,10 @@ def plot_qmap_in_folder(inDir): # TODO is this made for SMI as per docstring? """ import pickle as cpl - from pyCHX.v2._commonspeckle.chx_generic_functions import show_img # common #TODO why importing in a function? - from pyCHX.v2._commonspeckle.chx_libs import cmap_vge_hdr, plt # common #TODO why importing in a function? + from pyCHX.v2._commonspeckle.chx_libs import ( + cmap_vge_hdr, + plt, + ) # common #TODO why importing in a function? 
fp = get_base_all_filenames(inDir, base_filename_cut_length=-10) print( @@ -287,7 +301,7 @@ def plot_qmap_in_folder(inDir): # TODO is this made for SMI as per docstring? def get_qmap_range(calibration, phi_min, phi_max): """YG Sep 27@SMI Get q_range, [ qx_start, qx_end, qz_start, qz_end ] for SMI WAXS qmap - (only rotate around z-axis, so det_theta_g=0.,actually being the y-axis for beamline conventional defination) + (only rotate around z-axis, so det_theta_g=0.,actually being the y-axis for beamline conventional definition) based on calibration on Sep 22, offset_x= 0, offset_y= 22 Input: calibration: class, See SciAnalysis.XSAnalysis.DataGonio.CalibrationGonio @@ -308,7 +322,9 @@ def get_qmap_range(calibration, phi_min, phi_max): return np.array([qx_start, qx_end, qz_start, qz_end]) -def get_phi(filename, phi_offset=0, phi_start=4.5, phi_spacing=4.0, polarity=-1, ext="_WAXS.tif"): +def get_phi( + filename, phi_offset=0, phi_start=4.5, phi_spacing=4.0, polarity=-1, ext="_WAXS.tif" +): pattern_re = "^.+\/?([a-zA-Z0-9_]+_)(\d\d\d\d\d\d)(\%s)$" % ext # print( pattern_re ) # pattern_re='^.+\/?([a-zA-Z0-9_]+_)(\d\d\d)(\.tif)$' @@ -345,7 +361,7 @@ def get_qmap_qxyz_range( ): """YG Nov 8, 2017@CHX Get q_range, [ qx_start, qx_end, qz_start, qz_end ] for SMI WAXS qmap - (only rotate around z-axis, so det_theta_g=0.,actually being the y-axis for beamline conventional defination) + (only rotate around z-axis, so det_theta_g=0.,actually being the y-axis for beamline conventional definition) based on calibration on Sep 22, offset_x= 0, offset_y= 22 Input: calibration: class, See SciAnalysis.XSAnalysis.DataGonio.CalibrationGonio @@ -415,8 +431,8 @@ def stitch_WAXS_in_Qspace_CHX( dq=0.0008, ): """YG Octo 11, 2017 stitch waxs scattering images in qspace - dataM: the data (with corrected intensity), dict format (todolist, make array also avialable) - phis: for SMI, the rotation angle around z-aixs + dataM: the data (with corrected intensity), dict format (todolist, make array also 
available) + phis: for SMI, the rotation angle around z-axis For SMI dx= 0 #in pixel unit dy = 22 #in pixel unit diff --git a/pyCHX/v2/_commonspeckle/Two_Time_Correlation_Function.py b/pyCHX/v2/_commonspeckle/Two_Time_Correlation_Function.py index 6d05898..d9dc4d1 100644 --- a/pyCHX/v2/_commonspeckle/Two_Time_Correlation_Function.py +++ b/pyCHX/v2/_commonspeckle/Two_Time_Correlation_Function.py @@ -5,10 +5,7 @@ ###################################################################################### -import itertools -import sys import time -from datetime import datetime import matplotlib.pyplot as plt import numpy as np @@ -19,11 +16,10 @@ # from pyCHX.chx_libs import colors_ as mcolors, markers_ as markers from pyCHX.v2._commonspeckle.chx_libs import colors # common -from pyCHX.v2._commonspeckle.chx_libs import lstyles # common from pyCHX.v2._commonspeckle.chx_libs import RUN_GUI, Figure # common from pyCHX.v2._commonspeckle.chx_libs import markers from pyCHX.v2._commonspeckle.chx_libs import markers as markers_array -from pyCHX.v2._commonspeckle.chx_libs import markers_copy, mcolors, multi_tau_lags # common +from pyCHX.v2._commonspeckle.chx_libs import multi_tau_lags # common # from modest_image import ModestImage, imshow #common @@ -165,19 +161,23 @@ def run_time(t0): print("Total time: %.2f min" % (elapsed_time / 60.0)) -def get_each_frame_ROI_intensity(data_pixel, bad_pixel_threshold=1e10, plot_=False, *argv, **kwargs): +def get_each_frame_ROI_intensity( + data_pixel, bad_pixel_threshold=1e10, plot_=False, *argv, **kwargs +): """ Dec 16, 2015, Y.G.@CHX Get the ROI intensity of each frame Also get bad_frame_list by check whether above bad_pixel_threshold - Usuage: + Usage: imgsum, bad_frame_list = get_each_frame_intensity( data_pixel, bad_pixel_threshold=1e10, plot_ = True) """ # print ( argv, kwargs ) - imgsum = np.array([np.sum(img) for img in tqdm(data_series[::sampling], leave=True)]) + imgsum = np.array( + [np.sum(img) for img in 
tqdm(data_series[::sampling], leave=True)] + ) if plot_: uid = "uid" if "uid" in kwargs.keys(): @@ -250,7 +250,9 @@ def auto_two_Array(data, rois, data_pixel=None): sum1 = (np.average(data_pixel_qi, axis=1)).reshape(1, noframes) sum2 = sum1.T - g12b[:, :, qi - 1] = np.dot(data_pixel_qi, data_pixel_qi.T) / sum1 / sum2 / nopr[qi - 1] + g12b[:, :, qi - 1] = ( + np.dot(data_pixel_qi, data_pixel_qi.T) / sum1 / sum2 / nopr[qi - 1] + ) # print ( proi, int( qi //( Unitq) ) ) # if int( qi //( Unitq) ) == proi: # sys.stdout.write("#") @@ -361,7 +363,7 @@ def get_aged_g2_from_g12(g12, age_edge, age_center): """ Dec 16, 2015, Y.G.@CHX Get one-time correlation function of different age from two correlation function - namely, calculate the different aged mean of each diag line of g12 to get one-time correlation fucntion + namely, calculate the different aged mean of each diag line of g12 to get one-time correlation function Parameters: g12: a 3-D array, a two correlation function, shape as ( imgs_length, imgs_length, noqs ) @@ -400,7 +402,9 @@ def get_aged_g2_from_g12(g12, age_edge, age_center): return g2_aged -def get_aged_g2_from_g12q(g12q, age_edge, age_center=None, timeperframe=1, time_sampling="log", num_bufs=8): +def get_aged_g2_from_g12q( + g12q, age_edge, age_center=None, timeperframe=1, time_sampling="log", num_bufs=8 +): """ @@ -410,7 +414,7 @@ def get_aged_g2_from_g12q(g12q, age_edge, age_center=None, timeperframe=1, time_ Dec 16, 2015, Y.G.@CHX Revised at April 19, 2017 Get one-time correlation function of different age from 1q-two correlation function - namely, calculate the different aged mean of each diag line of g12 to get one-time correlation fucntion + namely, calculate the different aged mean of each diag line of g12 to get one-time correlation function Parameters: g12q: a 2-D array, one-q two correlation function, shape as ( imgs_length, imgs_length ) @@ -472,11 +476,13 @@ def get_aged_g2_from_g12q(g12q, age_edge, age_center=None, timeperframe=1, time_ return 
lag_dict, g2_aged -def get_aged_g2_from_g12q2(g12q, slice_num=6, slice_width=5, slice_start=0, slice_end=1): +def get_aged_g2_from_g12q2( + g12q, slice_num=6, slice_width=5, slice_start=0, slice_end=1 +): """ Dec 16, 2015, Y.G.@CHX Get one-time correlation function of different age from two correlation function - namely, calculate the different aged mean of each diag line of g12 to get one-time correlation fucntion + namely, calculate the different aged mean of each diag line of g12 to get one-time correlation function Parameters: g12q: a 2-D array, one-q two correlation function, shape as ( imgs_length, imgs_length ) @@ -501,7 +507,9 @@ def get_aged_g2_from_g12q2(g12q, slice_num=6, slice_width=5, slice_start=0, slic arr = rotate_g12q_to_rectangle(g12q) m, n = arr.shape # m should be 2*n-1 - age_edge, age_center = get_qedge(qstart=slice_start, qend=slice_end, qwidth=slice_width, noqs=slice_num) + age_edge, age_center = get_qedge( + qstart=slice_start, qend=slice_end, qwidth=slice_width, noqs=slice_num + ) age_edge, age_center = np.int_(age_edge), np.int_(age_center) # print (age_edge, age_center) g2_aged = {} @@ -560,7 +568,9 @@ def show_g12q_aged_g2( age_center = np.array(list(sorted(g2_aged.keys()))) print("the cut age centers are: " + str(age_center)) - age_center = np.int_(np.array(list(sorted(g2_aged.keys()))) / timeperframe) * 2 # in pixel + age_center = ( + np.int_(np.array(list(sorted(g2_aged.keys()))) / timeperframe) * 2 + ) # in pixel M, N = g12q.shape # fig, ax = plt.subplots( figsize = (8,8) ) @@ -743,7 +753,9 @@ def plot_aged_g2(g2_aged, tau=None, timeperframe=1, ylim=None, xlim=None): # get fout-time -def get_tau_from_g12q(g12q, slice_num=6, slice_width=1, slice_start=None, slice_end=None): +def get_tau_from_g12q( + g12q, slice_num=6, slice_width=1, slice_start=None, slice_end=None +): """ Dec 16, 2015, Y.G.@CHX Get tau lines from two correlation function @@ -773,7 +785,9 @@ def get_tau_from_g12q(g12q, slice_num=6, slice_width=1, slice_start=None, 
slice_ arr = rotate_g12q_to_rectangle(g12q) m, n = arr.shape # m should be 2*n-1 - age_edge, age_center = get_qedge(qstart=slice_start, qend=slice_end, qwidth=slice_width, noqs=slice_num) + age_edge, age_center = get_qedge( + qstart=slice_start, qend=slice_end, qwidth=slice_width, noqs=slice_num + ) age_edge, age_center = np.int_(age_edge), np.int_(age_center) # print (age_edge, age_center) tau = {} @@ -859,7 +873,9 @@ def show_g12q_taus(g12q, taus, slice_width=10, timeperframe=1, vmin=1, vmax=1.25 for i in sorted(taus.keys()): gx = np.arange(len(taus[i])) * timeperframe marker = next(markers) - ax1.plot(gx, taus[i], "-%s" % marker, label=r"$tau= %.1f s$" % (i * timeperframe)) + ax1.plot( + gx, taus[i], "-%s" % marker, label=r"$tau= %.1f s$" % (i * timeperframe) + ) ax1.set_ylim(vmin, vmax) ax1.set_xlabel(r"$t (s)$", fontsize=5) ax1.set_ylabel("g2") @@ -903,7 +919,7 @@ def histogram_taus(taus, hisbin=20, plot=True, timeperframe=1): if plot: fig, ax1 = plt.subplots(figsize=(8, 8)) - ax1.set_title("Tau_histgram") + ax1.set_title("Tau_histogram") for key in sorted(his.keys()): tx = 0.5 * (his[key][1][:-1] + his[key][1][1:]) marker = next(markers) @@ -916,7 +932,7 @@ def histogram_taus(taus, hisbin=20, plot=True, timeperframe=1): # ax1.set_ylim( 1.05,1.35 ) ax1.set_xlim(1.05, 1.35) ax1.set_xlabel(r"$g_2$", fontsize=19) - ax1.set_ylabel(r"histgram of g2 @ tau", fontsize=15) + ax1.set_ylabel(r"histogram of g2 @ tau", fontsize=15) # ax1.set_xscale('log') ax1.legend(fontsize="large", loc="best") # plt.show() @@ -933,7 +949,7 @@ def get_one_time_from_two_time_old(g12, norms=None, nopr=None): """ Dec 16, 2015, Y.G.@CHX Get one-time correlation function from two correlation function - namely, calculate the mean of each diag line of g12 to get one-time correlation fucntion + namely, calculate the mean of each diag line of g12 to get one-time correlation function Parameters: g12: a 3-D array, two correlation function, shape as ( imgs_length, imgs_length, q) @@ -963,7 +979,9 @@ 
def get_one_time_from_two_time_old(g12, norms=None, nopr=None): yn = norms[:, q] yn1 = np.average(yn[tau:]) yn2 = np.average(yn[: m - tau]) - g2f12[tau, q] = np.nanmean(np.diag(y, k=int(tau))) / (yn1 * yn2 * nopr[q]) + g2f12[tau, q] = np.nanmean(np.diag(y, k=int(tau))) / ( + yn1 * yn2 * nopr[q] + ) return g2f12 @@ -972,7 +990,7 @@ def get_one_time_from_two_time(g12, norms=None, nopr=None): """ Dec 16, 2015, Y.G.@CHX Get one-time correlation function from two correlation function - namely, calculate the mean of each diag line of g12 to get one-time correlation fucntion + namely, calculate the mean of each diag line of g12 to get one-time correlation function Parameters: g12: a 3-D array, two correlation function, shape as ( imgs_length, imgs_length, q) @@ -994,7 +1012,9 @@ def get_one_time_from_two_time(g12, norms=None, nopr=None): m, n, noqs = g12.shape if norms is None: g2f12 = np.array([np.nanmean(g12.diagonal(i), axis=1) for i in range(m)]) - g2f12_error = np.array([np.std(g12.diagonal(i), axis=1) / np.sqrt(m - i) for i in range(m)]) + g2f12_error = np.array( + [np.std(g12.diagonal(i), axis=1) / np.sqrt(m - i) for i in range(m)] + ) # propagate error to the last point g2f12_error[-1, :] = g2f12_error[-2, :] @@ -1007,11 +1027,18 @@ def get_one_time_from_two_time(g12, norms=None, nopr=None): g2f12_error = np.zeros([m, noqs]) for q in range(noqs): yn = norms[:, q] - scale = np.array([np.mean(yn[i:]) * np.mean(yn[: m - i]) * nopr[q] for i in range(m)]) - g2f12[:, q] = np.array([np.nanmean(g12[:, :, q].diagonal(i)) / scale[i] for i in range(m)]) + scale = np.array( + [np.mean(yn[i:]) * np.mean(yn[: m - i]) * nopr[q] for i in range(m)] + ) + g2f12[:, q] = np.array( + [np.nanmean(g12[:, :, q].diagonal(i)) / scale[i] for i in range(m)] + ) g2f12_error[:, q] = np.array( - [np.std(g12[:, :, q].diagonal(i)) / np.sqrt(m - i) / scale[i] for i in range(m)] + [ + np.std(g12[:, :, q].diagonal(i)) / np.sqrt(m - i) / scale[i] + for i in range(m) + ] ) # propagate error to the 
last point g2f12_error[-1, :] = g2f12_error[-2, :] @@ -1023,7 +1050,7 @@ def get_four_time_from_two_time(g12, g2=None, rois=None): """ Dec 16, 2015, Y.G.@CHX Get four-time correlation function from two correlation function - namely, calculate the deviation of each diag line of g12 to get four-time correlation fucntion + namely, calculate the deviation of each diag line of g12 to get four-time correlation function TOBEDONE: deal with bad frames Parameters: @@ -1031,7 +1058,7 @@ def get_four_time_from_two_time(g12, g2=None, rois=None): Options: g2: if not None, a 2-D array, shape as ( imgs_length, q), or (tau, q) - one-time correlation fucntion, for normalization of the four-time + one-time correlation function, for normalization of the four-time rois: if not None, a list, [x-slice-start, x-slice-end, y-slice-start, y-slice-end] Return: @@ -1049,11 +1076,18 @@ def get_four_time_from_two_time(g12, g2=None, rois=None): else: norm = 1.0 if rois is None: - g4f12 = np.array([(np.nanstd(g12.diagonal(i), axis=1)) ** 2 / norm for i in range(m)]) + g4f12 = np.array( + [(np.nanstd(g12.diagonal(i), axis=1)) ** 2 / norm for i in range(m)] + ) else: x1, x2, y1, y2 = rois - g4f12 = np.array([(np.nanstd(g12[x1:x2, y1:y2, :].diagonal(i), axis=1)) ** 2 / norm for i in range(m)]) + g4f12 = np.array( + [ + (np.nanstd(g12[x1:x2, y1:y2, :].diagonal(i), axis=1)) ** 2 / norm + for i in range(m) + ] + ) return g4f12 @@ -1318,7 +1352,9 @@ def show_C12( fig, ax = fig_ax # extent=[0, data.shape[0]*timeperframe, 0, data.shape[0]*timeperframe ] - extent = np.array([N1, N2, N1, N2]) * timeperframe + timeoffset ### added timeoffset to extend + extent = ( + np.array([N1, N2, N1, N2]) * timeperframe + timeoffset + ) ### added timeoffset to extend if logs: im = imshow( diff --git a/pyCHX/v2/_commonspeckle/XPCS_GiSAXS.py b/pyCHX/v2/_commonspeckle/XPCS_GiSAXS.py index 2bf9974..62bb6f7 100644 --- a/pyCHX/v2/_commonspeckle/XPCS_GiSAXS.py +++ b/pyCHX/v2/_commonspeckle/XPCS_GiSAXS.py @@ -4,18 +4,19 @@ This 
module is for the GiSAXS XPCS analysis """ -from skbeam.core.accumulators.binned_statistic import BinnedStatistic1D, BinnedStatistic2D +from skbeam.core.accumulators.binned_statistic import ( + BinnedStatistic1D, + BinnedStatistic2D, +) from pyCHX.v2._commonspeckle.chx_compress import ( # common Multifile, compress_eigerdata, get_avg_imgc, - init_compress_eigerdata, - read_compressed_eigerdata, ) from pyCHX.v2._commonspeckle.chx_correlationc import cal_g2c # common from pyCHX.v2._commonspeckle.chx_generic_functions import * # common -from pyCHX.v2._commonspeckle.chx_libs import colors, colors_, markers, markers_ # common +from pyCHX.v2._commonspeckle.chx_libs import colors, markers # common def get_gisaxs_roi2(qr_edge, qz_edge, qr_map, qz_map, mask=None, qval_dict=None): @@ -23,9 +24,9 @@ def get_gisaxs_roi2(qr_edge, qz_edge, qr_map, qz_map, mask=None, qval_dict=None) Get xpcs roi of gisaxs by giving Qr centers/edges, Qz centers/edges Parameters: qr_edge: list, e.g., [ [0.01,0.02], [0.03,0.04] ]. 
- each elment has two values for the start and end of one qr edge + each element has two values for the start and end of one qr edge qz_edge: list, e.g., [ [0.01,0.02], [0.03,0.04] ] - each elment has two values for the start and end of one qz edge + each element has two values for the start and end of one qz edge qr_map: two-d array, the same shape as gisaxs frame, a qr map qz_map: two-d array, the same shape as gisaxs frame, a qz map mask: array, the scattering mask @@ -44,14 +45,18 @@ def get_gisaxs_roi2(qr_edge, qz_edge, qr_map, qz_map, mask=None, qval_dict=None) qz_center = 0.5 * (qz_edge[:, 0] + qz_edge[:, 1]) label_array_qz = get_qmap_label(qz_map, qz_edge) label_array_qr = get_qmap_label(qr_map, qr_edge) - label_array_qzr, qzc, qrc = get_qzrmap(label_array_qz, label_array_qr, qz_center, qr_center) + label_array_qzr, qzc, qrc = get_qzrmap( + label_array_qz, label_array_qr, qz_center, qr_center + ) labels_qzr, indices_qzr = roi.extract_label_indices(label_array_qzr) labels_qz, indices_qz = roi.extract_label_indices(label_array_qz) labels_qr, indices_qr = roi.extract_label_indices(label_array_qr) if mask is None: mask = 1 roi_mask = label_array_qzr * mask - qval_dict = get_qval_dict(np.round(qr_center, 5), np.round(qz_center, 5), qval_dict=qval_dict) + qval_dict = get_qval_dict( + np.round(qr_center, 5), np.round(qz_center, 5), qval_dict=qval_dict + ) return roi_mask, qval_dict @@ -76,14 +81,18 @@ def get_gisaxs_roi(Qr, Qz, qr_map, qz_map, mask=None, qval_dict=None): qz_edge, qz_center = get_qedge(*Qz) label_array_qz = get_qmap_label(qz_map, qz_edge) label_array_qr = get_qmap_label(qr_map, qr_edge) - label_array_qzr, qzc, qrc = get_qzrmap(label_array_qz, label_array_qr, qz_center, qr_center) + label_array_qzr, qzc, qrc = get_qzrmap( + label_array_qz, label_array_qr, qz_center, qr_center + ) labels_qzr, indices_qzr = roi.extract_label_indices(label_array_qzr) labels_qz, indices_qz = roi.extract_label_indices(label_array_qz) labels_qr, indices_qr = 
roi.extract_label_indices(label_array_qr) if mask is None: mask = 1 roi_mask = label_array_qzr * mask - qval_dict = get_qval_dict(np.round(qr_center, 5), np.round(qz_center, 5), qval_dict=qval_dict) + qval_dict = get_qval_dict( + np.round(qr_center, 5), np.round(qz_center, 5), qval_dict=qval_dict + ) return roi_mask, qval_dict @@ -116,7 +125,7 @@ def get_qr(data, Qr, Qz, qr, qz, mask=None): Qz= [qz_start, qz_end, qz_width , qz_num ] new_mask[ :, 1020:1045] =0 ticks = show_qzr_map( qr,qz, inc_x0, data = avg_imgmr, Nzline=10, Nrline=10 ) - qx, qy, qr, qz = convert_gisaxs_pixel_to_q( inc_x0, inc_y0,refl_x0,refl_y0, lamda=lamda, Lsd=Lsd ) + qx, qy, qr, qz = convert_gisaxs_pixel_to_q( inc_x0, inc_y0,refl_x0,refl_y0, lambda=lambda, Lsd=Lsd ) qr_1d = get_qr( avg_imgr, Qr, Qz, qr, qz, new_mask) """ @@ -133,7 +142,9 @@ def get_qr(data, Qr, Qz, qr, qz, mask=None): # print (i,qzc_) label_array_qz = get_qmap_label(qz, qz_edge[i * 2 : 2 * i + 2]) # print (qzc_, qz_edge[i*2:2*i+2]) - label_array_qzr, qzc, qrc = get_qzrmap(label_array_qz, label_array_qr, qz_center, qr_center) + label_array_qzr, qzc, qrc = get_qzrmap( + label_array_qz, label_array_qr, qz_center, qr_center + ) # print (np.unique(label_array_qzr )) if mask is not None: label_array_qzr *= mask @@ -142,7 +153,9 @@ def get_qr(data, Qr, Qz, qr, qz, mask=None): data_ = data * label_array_qzr qr_ave = np.sum(qr_, axis=0) / roi_pixel_num data_ave = np.sum(data_, axis=0) / roi_pixel_num - qr_ave, data_ave = zip(*sorted(zip(*[qr_ave[~np.isnan(qr_ave)], data_ave[~np.isnan(data_ave)]]))) + qr_ave, data_ave = zip( + *sorted(zip(*[qr_ave[~np.isnan(qr_ave)], data_ave[~np.isnan(data_ave)]])) + ) if i == 0: N_interp = len(qr_ave) @@ -150,9 +163,13 @@ def get_qr(data, Qr, Qz, qr, qz, mask=None): data_ave = np.interp(qr_ave_intp, qr_ave, data_ave) # columns.append( ['qr%s'%i, str(round(qzc_,4))] ) if i == 0: - df = np.hstack([(qr_ave_intp).reshape(N_interp, 1), data_ave.reshape(N_interp, 1)]) + df = np.hstack( + 
[(qr_ave_intp).reshape(N_interp, 1), data_ave.reshape(N_interp, 1)] + ) else: - df = np.hstack([df, (qr_ave_intp).reshape(N_interp, 1), data_ave.reshape(N_interp, 1)]) + df = np.hstack( + [df, (qr_ave_intp).reshape(N_interp, 1), data_ave.reshape(N_interp, 1)] + ) # df = DataFrame( df ) # df.columns = np.concatenate( columns ) @@ -182,7 +199,7 @@ def cal_1d_qr( Dec 16, 2016, Y.G.@CHX calculate one-d of I(q) as a function of qr for different qz data: a dataframe - Qr: info for qr, = qr_start , qr_end, qr_width, qr_num, the purpose of Qr is only for the defination of qr range (qr number does not matter) + Qr: info for qr, = qr_start , qr_end, qr_width, qr_num, the purpose of Qr is only for the definition of qr range (qr number does not matter) Qz: info for qz, = qz_start, qz_end, qz_width , qz_num qr: qr-map qz: qz-map @@ -210,7 +227,7 @@ def cal_1d_qr( Qr = [qr_start , qr_end, qr_width, qr_num] Qz= [qz_start, qz_end, qz_width , qz_num ] new_mask[ :, 1020:1045] =0 - qx, qy, qr, qz = convert_gisaxs_pixel_to_q( inc_x0, inc_y0,refl_x0,refl_y0, lamda=lamda, Lsd=Lsd ) + qx, qy, qr, qz = convert_gisaxs_pixel_to_q( inc_x0, inc_y0,refl_x0,refl_y0, lambda=lambda, Lsd=Lsd ) qr_1d = get_1d_qr( avg_imgr, Qr, Qz, qr, qz, inc_x0, new_mask) @@ -232,7 +249,9 @@ def cal_1d_qr( # print (i,qzc_) label_array_qz = get_qmap_label(qz, qz_edge[i * 2 : 2 * i + 2]) # print (qzc_, qz_edge[i*2:2*i+2]) - label_array_qzr, qzc, qrc = get_qzrmap(label_array_qz, label_array_qr, qz_center, qr_center) + label_array_qzr, qzc, qrc = get_qzrmap( + label_array_qz, label_array_qr, qz_center, qr_center + ) # print (np.unique(label_array_qzr )) if mask is not None: label_array_qzr *= mask @@ -247,7 +266,9 @@ def cal_1d_qr( qr_ave = (np.sum(qr_, axis=0))[w] / roi_pixel_num[w] data_ave = (np.sum(data_, axis=0))[w] / roi_pixel_num[w] - qr_ave, data_ave = zip(*sorted(zip(*[qr_ave[~np.isnan(qr_ave)], data_ave[~np.isnan(data_ave)]]))) + qr_ave, data_ave = zip( + *sorted(zip(*[qr_ave[~np.isnan(qr_ave)], 
data_ave[~np.isnan(data_ave)]])) + ) if i == 0: N_interp = len(qr_ave) columns.append(["qr"]) @@ -257,7 +278,9 @@ def cal_1d_qr( # qr_1d[i]= [qr_ave_intp, data_ave] columns.append(["qz%s=%s" % (i, str(round(qzc_, 4)))]) if i == 0: - df = np.hstack([(qr_ave_intp).reshape(N_interp, 1), data_ave.reshape(N_interp, 1)]) + df = np.hstack( + [(qr_ave_intp).reshape(N_interp, 1), data_ave.reshape(N_interp, 1)] + ) else: df = np.hstack([df, data_ave.reshape(N_interp, 1)]) df = DataFrame(df) @@ -271,11 +294,26 @@ def cal_1d_qr( filename = os.path.join(path, "%s_qr_1d.csv" % (uid)) df.to_csv(filename) if print_save_message: - print("The qr_1d is saved in %s with filename as %s_qr_1d.csv" % (path, uid)) + print( + "The qr_1d is saved in %s with filename as %s_qr_1d.csv" % (path, uid) + ) return df -def get_t_qrc(FD, frame_edge, Qr, Qz, qr, qz, mask=None, path=None, uid=None, save=True, *argv, **kwargs): +def get_t_qrc( + FD, + frame_edge, + Qr, + Qz, + qr, + qz, + mask=None, + path=None, + uid=None, + save=True, + *argv, + **kwargs, +): """Get t-dependent qr Parameters @@ -327,11 +365,15 @@ def get_t_qrc(FD, frame_edge, Qr, Qz, qr, qz, mask=None, path=None, uid=None, sa uid = setup_pargs["uid"] filename = os.path.join(path, "%s_qrt_pds.csv" % (uid)) qrt_pds.to_csv(filename) - print("The qr~time is saved in %s with filename as %s_qrt_pds.csv" % (path, uid)) + print( + "The qr~time is saved in %s with filename as %s_qrt_pds.csv" % (path, uid) + ) return qrt_pds -def plot_qrt_pds(qrt_pds, frame_edge, qz_index=0, uid="uid", path="", fontsize=8, *argv, **kwargs): +def plot_qrt_pds( + qrt_pds, frame_edge, qz_index=0, uid="uid", path="", fontsize=8, *argv, **kwargs +): """Y.G. 
Jan 04, 2017 plot t-dependent qr @@ -388,7 +430,7 @@ def plot_t_qrc(qr_1d, frame_edge, save=False, pargs=None, fontsize=8, *argv, **k qr_1d: array, with shape as time length, frame_edge frame_edge: list, the ROI frame regions, e.g., [ [0,100], [200,400] ] save: save the plot - if save, all the following paramters are given in argv + if save, all the following parameters are given in argv { 'path': 'uid': } @@ -458,7 +500,9 @@ def make_gisaxs_grid(qr_w=10, qz_w=12, dim_r=100, dim_z=120): ########################################### -def convert_Qmap(img, qx_map, qy_map=None, bins=None, rangeq=None, mask=None, statistic="sum"): +def convert_Qmap( + img, qx_map, qy_map=None, bins=None, rangeq=None, mask=None, statistic="sum" +): """Y.G. Nov 3@CHX Convert a scattering image to a qmap by giving qx_map and qy_map Return converted qmap, x-coordinates and y-coordinates @@ -475,9 +519,18 @@ def convert_Qmap(img, qx_map, qy_map=None, bins=None, rangeq=None, mask=None, st else: m = None b2d = BinnedStatistic2D( - qx_map.ravel(), qy_map.ravel(), statistic=statistic, bins=bins, mask=m, range=rangeq + qx_map.ravel(), + qy_map.ravel(), + statistic=statistic, + bins=bins, + mask=m, + range=rangeq, + ) + remesh_data, xbins, ybins = ( + b2d(img.ravel()), + b2d.bin_centers[0], + b2d.bin_centers[1], ) - remesh_data, xbins, ybins = b2d(img.ravel()), b2d.bin_centers[0], b2d.bin_centers[1] else: if rangeq is None: qx_min, qx_max = qx_map.min(), qx_map.max() @@ -515,7 +568,14 @@ def get_refl_xy(inc_ang, inc_phi, inc_x0, inc_y0, pixelsize=[0.075, 0.075], Lsd= def get_alphaf_thetaf( - inc_x0, inc_y0, inc_ang, inc_phi=0, pixelsize=[0.075, 0.075], Lsd=5000, dimx=2070.0, dimy=2167.0 + inc_x0, + inc_y0, + inc_ang, + inc_phi=0, + pixelsize=[0.075, 0.075], + Lsd=5000, + dimx=2070.0, + dimy=2167.0, ): """Nov 19, 2018@SMI to get alphaf and thetaf for gi scattering Input: @@ -543,7 +603,7 @@ def convert_gisaxs_pixel_to_q2( alphaf, thetaf, phi=0, - lamda=1.0, + lambda=1.0, thetai=0.0, ): """ @@ -558,7 
+618,7 @@ def convert_gisaxs_pixel_to_q2( get: q_parallel (qp), q_direction_z (qz) """ - pref = 2 * np.pi / lamda + pref = 2 * np.pi / lambda alphai = np.radians(inc_ang) thetai = np.radians(thetai) phi = np.radians(phi) @@ -585,7 +645,9 @@ def get_incident_angles(inc_x0, inc_y0, refl_x0, refl_y0, pixelsize=[75, 75], Ls Lsd = Lsd / 1000.0 px, py = pixelsize - phi = np.arctan2((-refl_x0 + inc_x0) * px * 10 ** (-6), (refl_y0 - inc_y0) * py * 10 ** (-6)) + phi = np.arctan2( + (-refl_x0 + inc_x0) * px * 10 ** (-6), (refl_y0 - inc_y0) * py * 10 ** (-6) + ) alphai = np.arctan2((refl_y0 - inc_y0) * py * 10 ** (-6), Lsd) / 2.0 # thetai = np.arctan2( (rcenx - bcenx)*px *10**(-6), Lsd ) /2. #?? @@ -593,7 +655,15 @@ def get_incident_angles(inc_x0, inc_y0, refl_x0, refl_y0, pixelsize=[75, 75], Ls def get_reflected_angles( - inc_x0, inc_y0, refl_x0, refl_y0, thetai=0.0, pixelsize=[75, 75], Lsd=5.0, dimx=2070.0, dimy=2167.0 + inc_x0, + inc_y0, + refl_x0, + refl_y0, + thetai=0.0, + pixelsize=[75, 75], + Lsd=5.0, + dimx=2070.0, + dimy=2167.0, ): """Dec 16, 2015, Y.G.@CHX giving: incident beam center: bcenx,bceny @@ -619,7 +689,16 @@ def get_reflected_angles( def convert_gisaxs_pixel_to_q( - inc_x0, inc_y0, refl_x0, refl_y0, pixelsize=[75, 75], Lsd=5.0, dimx=2070.0, dimy=2167.0, thetai=0.0, lamda=1.0 + inc_x0, + inc_y0, + refl_x0, + refl_y0, + pixelsize=[75, 75], + Lsd=5.0, + dimx=2070.0, + dimy=2167.0, + thetai=0.0, + lambda=1.0, ): """ Dec 16, 2015, Y.G.@CHX @@ -636,7 +715,7 @@ def convert_gisaxs_pixel_to_q( alphaf, thetaf, alphai, phi = get_reflected_angles( inc_x0, inc_y0, refl_x0, refl_y0, thetai, pixelsize, Lsd, dimx, dimy ) - pref = 2 * np.pi / lamda + pref = 2 * np.pi / lambda qx = np.cos(alphaf) * np.cos(2 * thetaf) - np.cos(alphai) * np.cos(2 * thetai) qy_ = np.cos(alphaf) * np.sin(2 * thetaf) - np.cos(alphai) * np.sin(2 * thetai) qz_ = np.sin(alphaf) + np.sin(alphai) @@ -655,7 +734,6 @@ def get_qedge(qstart, qend, qwidth, noqs, verbose=True): return a qedge by giving the 
noqs, qstart,qend,qwidth. a qcenter, which is center of each qedge KEYWORD: None""" - import numpy as np if noqs != 1: spacing = (qend - qstart - noqs * qwidth) / (noqs - 1) # spacing between rings @@ -735,13 +813,23 @@ def get_qzrmap(label_array_qz, label_array_qr, qz_center, qr_center): for i, label in enumerate(uqzr): # print (i, label) - label_array_qzr_.ravel()[np.where(label_array_qzr.ravel() == label)[0]] = newl[i] + label_array_qzr_.ravel()[np.where(label_array_qzr.ravel() == label)[0]] = newl[ + i + ] return np.int_(label_array_qzr_), np.array(qzc), np.concatenate(np.array(qrc)) def show_label_array_on_image( - ax, image, label_array, cmap=None, norm=None, log_img=True, alpha=0.3, imshow_cmap="gray", **kwargs + ax, + image, + label_array, + cmap=None, + norm=None, + log_img=True, + alpha=0.3, + imshow_cmap="gray", + **kwargs, ): # norm=LogNorm(), """ This will plot the required ROI's(labeled array) on the image @@ -771,9 +859,13 @@ def show_label_array_on_image( """ ax.set_aspect("equal") if log_img: - im = ax.imshow(image, cmap=imshow_cmap, interpolation="none", norm=LogNorm(norm), **kwargs) # norm=norm, + im = ax.imshow( + image, cmap=imshow_cmap, interpolation="none", norm=LogNorm(norm), **kwargs + ) # norm=norm, else: - im = ax.imshow(image, cmap=imshow_cmap, interpolation="none", norm=norm, **kwargs) # norm=norm, + im = ax.imshow( + image, cmap=imshow_cmap, interpolation="none", norm=norm, **kwargs + ) # norm=norm, im_label = mpl_plot.show_label_array( ax, label_array, cmap=cmap, norm=norm, alpha=alpha, **kwargs @@ -784,7 +876,7 @@ def show_label_array_on_image( def show_qz(qz): """Dec 16, 2015, Y.G.@CHX - plot qz mape + plot qz map """ @@ -797,7 +889,7 @@ def show_qz(qz): def show_qr(qr): """Dec 16, 2015, Y.G.@CHX - plot qr mape + plot qr map """ fig, ax = plt.subplots() @@ -811,12 +903,14 @@ def show_alphaf( alphaf, ): """Dec 16, 2015, Y.G.@CHX - plot alphaf mape + plot alphaf map """ fig, ax = plt.subplots() - im = ax.imshow(alphaf * 180 / np.pi, 
origin="lower", cmap="viridis", vmin=-1, vmax=1.5) + im = ax.imshow( + alphaf * 180 / np.pi, origin="lower", cmap="viridis", vmin=-1, vmax=1.5 + ) # im=ax.imshow(alphaf, origin='lower' ,cmap='viridis',norm= LogNorm(vmin=0.0001,vmax=2.00)) fig.colorbar(im) ax.set_title("alphaf") @@ -877,7 +971,7 @@ def get_1d_qr( Qz= [qz_start, qz_end, qz_width , qz_num ] new_mask[ :, 1020:1045] =0 ticks = show_qzr_map( qr,qz, inc_x0, data = avg_imgmr, Nzline=10, Nrline=10 ) - qx, qy, qr, qz = convert_gisaxs_pixel_to_q( inc_x0, inc_y0,refl_x0,refl_y0, lamda=lamda, Lsd=Lsd ) + qx, qy, qr, qz = convert_gisaxs_pixel_to_q( inc_x0, inc_y0,refl_x0,refl_y0, lambda=lambda, Lsd=Lsd ) qr_1d = get_1d_qr( avg_imgr, Qr, Qz, qr, qz, inc_x0, new_mask, True, ticks, .8) @@ -898,7 +992,9 @@ def get_1d_qr( if show_roi: label_array_qz0 = get_qmap_label(qz, qz_edge) - label_array_qzr0, qzc0, qrc0 = get_qzrmap(label_array_qz0, label_array_qr, qz_center, qr_center) + label_array_qzr0, qzc0, qrc0 = get_qzrmap( + label_array_qz0, label_array_qr, qz_center, qr_center + ) if mask is not None: label_array_qzr0 *= mask @@ -912,7 +1008,9 @@ def get_1d_qr( # print (i,qzc_) label_array_qz = get_qmap_label(qz, qz_edge[i * 2 : 2 * i + 2]) # print (qzc_, qz_edge[i*2:2*i+2]) - label_array_qzr, qzc, qrc = get_qzrmap(label_array_qz, label_array_qr, qz_center, qr_center) + label_array_qzr, qzc, qrc = get_qzrmap( + label_array_qz, label_array_qr, qz_center, qr_center + ) # print (np.unique(label_array_qzr )) if mask is not None: label_array_qzr *= mask @@ -922,7 +1020,9 @@ def get_1d_qr( qr_ave = np.sum(qr_, axis=0) / roi_pixel_num data_ave = np.sum(data_, axis=0) / roi_pixel_num - qr_ave, data_ave = zip(*sorted(zip(*[qr_ave[~np.isnan(qr_ave)], data_ave[~np.isnan(data_ave)]]))) + qr_ave, data_ave = zip( + *sorted(zip(*[qr_ave[~np.isnan(qr_ave)], data_ave[~np.isnan(data_ave)]])) + ) if i == 0: N_interp = len(qr_ave) @@ -937,9 +1037,13 @@ def get_1d_qr( else: ax.plot(qr_ave_intp, data_ave, "--o", label="qz= %f" % qzc_) if i 
== 0: - df = np.hstack([(qr_ave_intp).reshape(N_interp, 1), data_ave.reshape(N_interp, 1)]) + df = np.hstack( + [(qr_ave_intp).reshape(N_interp, 1), data_ave.reshape(N_interp, 1)] + ) else: - df = np.hstack([df, (qr_ave_intp).reshape(N_interp, 1), data_ave.reshape(N_interp, 1)]) + df = np.hstack( + [df, (qr_ave_intp).reshape(N_interp, 1), data_ave.reshape(N_interp, 1)] + ) # ax.set_xlabel( r'$q_r$', fontsize=15) ax.set_xlabel(r"$q_r$" r"($\AA^{-1}$)", fontsize=18) @@ -960,7 +1064,9 @@ def get_1d_qr( # filename = os.path.join(path, 'qr_1d-%s-%s.csv' % (uid,CurTime)) filename = os.path.join(path, "uid=%s--qr_1d.csv" % (uid)) df.to_csv(filename) - print("The qr_1d is saved in %s with filename as uid=%s--qr_1d.csv" % (path, uid)) + print( + "The qr_1d is saved in %s with filename as uid=%s--qr_1d.csv" % (path, uid) + ) # fp = path + 'Uid= %s--Circular Average'%uid + CurTime + '.png' fp = path + "uid=%s--qr_1d-" % uid + ".png" @@ -1082,13 +1188,17 @@ def get_qr_tick_label(qr, label_array_qr, inc_x0, interp=True): rticks_label = np.array(rticks_label) try: w = np.where(rticks <= inc_x0)[0] - rticks1 = np.int_(np.interp(np.round(rticks_label[w], 3), rticks_label[w], rticks[w])) + rticks1 = np.int_( + np.interp(np.round(rticks_label[w], 3), rticks_label[w], rticks[w]) + ) rticks_label1 = np.round(rticks_label[w], 3) except: rticks_label1 = [] try: w = np.where(rticks > inc_x0)[0] - rticks2 = np.int_(np.interp(np.round(rticks_label[w], 3), rticks_label[w], rticks[w])) + rticks2 = np.int_( + np.interp(np.round(rticks_label[w], 3), rticks_label[w], rticks[w]) + ) rticks = np.append(rticks1, rticks2) rticks_label2 = np.round(rticks_label[w], 3) except: @@ -1134,7 +1244,17 @@ def get_qz_tick_label(qz, label_array_qz, interp=True): return zticks, zticks_label -def get_qzr_map(qr, qz, inc_x0, Nzline=10, Nrline=10, interp=True, return_qrz_label=True, *argv, **kwargs): +def get_qzr_map( + qr, + qz, + inc_x0, + Nzline=10, + Nrline=10, + interp=True, + return_qrz_label=True, + *argv, 
+ **kwargs, +): """ Dec 31, 2016, Y.G.@CHX Calculate a qzr map of a gisaxs image (data) without plot @@ -1154,16 +1274,20 @@ def get_qzr_map(qr, qz, inc_x0, Nzline=10, Nrline=10, interp=True, return_qrz_la rticks: list, r-tick positions in unit of pixel rticks_label: list, r-tick positions in unit of real space else: return the additional two below - label_array_qr: qr label array with the same shpae as gisaxs image - label_array_qz: qz label array with the same shpae as gisaxs image + label_array_qr: qr label array with the same shape as gisaxs image + label_array_qz: qz label array with the same shape as gisaxs image Examples: ticks = get_qzr_map( qr, qz, inc_x0 ) """ qr_start, qr_end, qr_num = qr.min(), qr.max(), Nrline qz_start, qz_end, qz_num = qz.min(), qz.max(), Nzline - qr_edge, qr_center = get_qedge(qr_start, qr_end, (qr_end - qr_start) / (qr_num + 100), qr_num) - qz_edge, qz_center = get_qedge(qz_start, qz_end, (qz_end - qz_start) / (qz_num + 100), qz_num) + qr_edge, qr_center = get_qedge( + qr_start, qr_end, (qr_end - qr_start) / (qr_num + 100), qr_num + ) + qz_edge, qz_center = get_qedge( + qz_start, qz_end, (qz_end - qz_start) / (qz_num + 100), qz_num + ) label_array_qz = get_qmap_label(qz, qz_edge) label_array_qr = get_qmap_label(qr, qr_edge) @@ -1175,18 +1299,41 @@ def get_qzr_map(qr, qz, inc_x0, Nzline=10, Nrline=10, interp=True, return_qrz_la zticks, zticks_label = get_qz_tick_label(qz, label_array_qz) # rticks,rticks_label = get_qr_tick_label(label_array_qr,inc_x0) try: - rticks, rticks_label = zip(*np.sort(zip(*get_qr_tick_label(qr, label_array_qr, inc_x0, interp=interp)))) + rticks, rticks_label = zip( + *np.sort(zip(*get_qr_tick_label(qr, label_array_qr, inc_x0, interp=interp))) + ) except: - rticks, rticks_label = zip(*sorted(zip(*get_qr_tick_label(qr, label_array_qr, inc_x0, interp=interp)))) + rticks, rticks_label = zip( + *sorted(zip(*get_qr_tick_label(qr, label_array_qr, inc_x0, interp=interp))) + ) # stride = int(len(zticks)/10) ticks = 
[zticks, zticks_label, rticks, rticks_label] if return_qrz_label: - return zticks, zticks_label, rticks, rticks_label, label_array_qr, label_array_qz + return ( + zticks, + zticks_label, + rticks, + rticks_label, + label_array_qr, + label_array_qz, + ) else: return zticks, zticks_label, rticks, rticks_label -def plot_qzr_map(qr, qz, inc_x0, ticks=None, data=None, uid="uid", path="", vmin=0.001, vmax=1e1, *argv, **kwargs): +def plot_qzr_map( + qr, + qz, + inc_x0, + ticks=None, + data=None, + uid="uid", + path="", + vmin=0.001, + vmax=1e1, + *argv, + **kwargs, +): """ Dec 31, 2016, Y.G.@CHX plot a qzr map of a gisaxs image (data) @@ -1201,8 +1348,8 @@ def plot_qzr_map(qr, qz, inc_x0, ticks=None, data=None, uid="uid", path="", vmin zticks_label: list, z-tick positions in unit of real space rticks: list, r-tick positions in unit of pixel rticks_label: list, r-tick positions in unit of real space - label_array_qr: qr label array with the same shpae as gisaxs image - label_array_qz: qz label array with the same shpae as gisaxs image + label_array_qr: qr label array with the same shape as gisaxs image + label_array_qz: qz label array with the same shape as gisaxs image inc_x0: the incident beam center x Options: @@ -1225,11 +1372,13 @@ def plot_qzr_map(qr, qz, inc_x0, ticks=None, data=None, uid="uid", path="", vmin import matplotlib.pyplot as plt if ticks is None: - zticks, zticks_label, rticks, rticks_label, label_array_qr, label_array_qz = get_qzr_map( - qr, qz, inc_x0, return_qrz_label=True + zticks, zticks_label, rticks, rticks_label, label_array_qr, label_array_qz = ( + get_qzr_map(qr, qz, inc_x0, return_qrz_label=True) ) else: - zticks, zticks_label, rticks, rticks_label, label_array_qr, label_array_qz = ticks + zticks, zticks_label, rticks, rticks_label, label_array_qr, label_array_qz = ( + ticks + ) cmap = "viridis" _cmap = copy.copy((mcm.get_cmap(cmap))) @@ -1239,7 +1388,9 @@ def plot_qzr_map(qr, qz, inc_x0, ticks=None, data=None, uid="uid", path="", vmin data = 
qr + qz im = ax.imshow(data, cmap="viridis", origin="lower") else: - im = ax.imshow(data, cmap="viridis", origin="lower", norm=LogNorm(vmin=vmin, vmax=vmax)) + im = ax.imshow( + data, cmap="viridis", origin="lower", norm=LogNorm(vmin=vmin, vmax=vmax) + ) imr = ax.imshow( label_array_qr, origin="lower", cmap="viridis", vmin=0.5, vmax=None @@ -1268,7 +1419,9 @@ def plot_qzr_map(qr, qz, inc_x0, ticks=None, data=None, uid="uid", path="", vmin fig.savefig(fp, dpi=fig.dpi) -def show_qzr_map(qr, qz, inc_x0, data=None, Nzline=10, Nrline=10, interp=True, *argv, **kwargs): +def show_qzr_map( + qr, qz, inc_x0, data=None, Nzline=10, Nrline=10, interp=True, *argv, **kwargs +): """ Dec 16, 2015, Y.G.@CHX plot a qzr map of a gisaxs image (data) @@ -1308,8 +1461,12 @@ def show_qzr_map(qr, qz, inc_x0, data=None, Nzline=10, Nrline=10, interp=True, * qr_start, qr_end, qr_num = qr.min(), qr.max(), Nrline qz_start, qz_end, qz_num = qz.min(), qz.max(), Nzline - qr_edge, qr_center = get_qedge(qr_start, qr_end, (qr_end - qr_start) / (qr_num + 100), qr_num) - qz_edge, qz_center = get_qedge(qz_start, qz_end, (qz_end - qz_start) / (qz_num + 100), qz_num) + qr_edge, qr_center = get_qedge( + qr_start, qr_end, (qr_end - qr_start) / (qr_num + 100), qr_num + ) + qz_edge, qz_center = get_qedge( + qz_start, qz_end, (qz_end - qz_start) / (qz_num + 100), qz_num + ) label_array_qz = get_qmap_label(qz, qz_edge) label_array_qr = get_qmap_label(qr, qr_edge) @@ -1325,7 +1482,9 @@ def show_qzr_map(qr, qz, inc_x0, data=None, Nzline=10, Nrline=10, interp=True, * data = qr + qz im = ax.imshow(data, cmap="viridis", origin="lower") else: - im = ax.imshow(data, cmap="viridis", origin="lower", norm=LogNorm(vmin=0.001, vmax=1e1)) + im = ax.imshow( + data, cmap="viridis", origin="lower", norm=LogNorm(vmin=0.001, vmax=1e1) + ) imr = ax.imshow( label_array_qr, origin="lower", cmap="viridis", vmin=0.5, vmax=None @@ -1334,7 +1493,7 @@ def show_qzr_map(qr, qz, inc_x0, data=None, Nzline=10, Nrline=10, interp=True, * 
label_array_qz, origin="lower", cmap="viridis", vmin=0.5, vmax=None ) # ,interpolation='nearest',) - # caxr = fig.add_axes([0.88, 0.2, 0.03, .7]) #x,y, width, heigth + # caxr = fig.add_axes([0.88, 0.2, 0.03, .7]) #x,y, width, height # cba = fig.colorbar(im, cax=caxr ) # cba = fig.colorbar(im, fraction=0.046, pad=0.04) @@ -1351,9 +1510,13 @@ def show_qzr_map(qr, qz, inc_x0, data=None, Nzline=10, Nrline=10, interp=True, * zticks, zticks_label = get_qz_tick_label(qz, label_array_qz) # rticks,rticks_label = get_qr_tick_label(label_array_qr,inc_x0) try: - rticks, rticks_label = zip(*np.sort(zip(*get_qr_tick_label(qr, label_array_qr, inc_x0, interp=interp)))) + rticks, rticks_label = zip( + *np.sort(zip(*get_qr_tick_label(qr, label_array_qr, inc_x0, interp=interp))) + ) except: - rticks, rticks_label = zip(*sorted(zip(*get_qr_tick_label(qr, label_array_qr, inc_x0, interp=interp)))) + rticks, rticks_label = zip( + *sorted(zip(*get_qr_tick_label(qr, label_array_qr, inc_x0, interp=interp))) + ) # stride = int(len(zticks)/10) stride = 1 @@ -1400,7 +1563,7 @@ def show_qzr_roi( save=False, return_fig=False, *argv, - **kwargs + **kwargs, ): """ Dec 16, 2015, Y.G.@CHX @@ -1648,7 +1811,9 @@ def plot_gisaxs_g2(g2, taus, res_pargs=None, one_plot=False, *argv, **kwargs): # plot g2 results -def plot_gisaxs_two_g2(g2, taus, g2b, tausb, res_pargs=None, one_plot=False, *argv, **kwargs): +def plot_gisaxs_two_g2( + g2, taus, g2b, tausb, res_pargs=None, one_plot=False, *argv, **kwargs +): """Dec 16, 2015, Y.G.@CHX plot g2 results, g2: one-time correlation function from a multi-tau method @@ -1804,7 +1969,9 @@ def plot_gisaxs_two_g2(g2, taus, g2b, tausb, res_pargs=None, one_plot=False, *ar # plt.show() -def save_gisaxs_g2(g2, res_pargs, time_label=False, taus=None, filename=None, *argv, **kwargs): +def save_gisaxs_g2( + g2, res_pargs, time_label=False, taus=None, filename=None, *argv, **kwargs +): """ Aug 8, 2016, Y.G.@CHX save g2 results, @@ -1843,14 +2010,23 @@ def save_gisaxs_g2(g2, 
res_pargs, time_label=False, taus=None, filename=None, *a if filename is None: if time_label: dt = datetime.now() - CurTime = "%s%02d%02d-%02d%02d-" % (dt.year, dt.month, dt.day, dt.hour, dt.minute) + CurTime = "%s%02d%02d-%02d%02d-" % ( + dt.year, + dt.month, + dt.day, + dt.hour, + dt.minute, + ) filename = os.path.join(path, "g2-%s-%s.csv" % (uid, CurTime)) else: filename = os.path.join(path, "uid=%s--g2.csv" % (uid)) else: filename = os.path.join(path, filename) df.to_csv(filename) - print("The correlation function of uid= %s is saved with filename as %s" % (uid, filename)) + print( + "The correlation function of uid= %s is saved with filename as %s" + % (uid, filename) + ) def stretched_auto_corr_scat_factor(x, beta, relaxation_rate, alpha=1.0, baseline=1): @@ -1861,7 +2037,9 @@ def simple_exponential(x, beta, relaxation_rate, baseline=1): return beta * np.exp(-2 * relaxation_rate * x) + baseline -def fit_gisaxs_g2(g2, res_pargs, function="simple_exponential", one_plot=False, *argv, **kwargs): +def fit_gisaxs_g2( + g2, res_pargs, function="simple_exponential", one_plot=False, *argv, **kwargs +): """ July 20,2016, Y.G.@CHX Fit one-time correlation function @@ -1886,12 +2064,12 @@ def fit_gisaxs_g2(g2, res_pargs, function="simple_exponential", one_plot=False, function: 'simple_exponential': fit by a simple exponential function, defined as beta * np.exp(-2 * relaxation_rate * lags) + baseline - 'streched_exponential': fit by a streched exponential function, defined as + 'stretched_exponential': fit by a stretched exponential function, defined as beta * (np.exp(-2 * relaxation_rate * lags))**alpha + baseline Returns ------- - fit resutls: + fit results: a dict, with keys as 'baseline': 'beta': @@ -1921,7 +2099,9 @@ def fit_gisaxs_g2(g2, res_pargs, function="simple_exponential", one_plot=False, if function == "simple_exponential" or function == "simple": _vars = np.unique(_vars + ["alpha"]) - mod = Model(stretched_auto_corr_scat_factor) # , independent_vars= list( 
_vars) ) + mod = Model( + stretched_auto_corr_scat_factor + ) # , independent_vars= list( _vars) ) elif function == "stretched_exponential" or function == "stretched": mod = Model(stretched_auto_corr_scat_factor) # , independent_vars= _vars) @@ -1966,9 +2146,16 @@ def fit_gisaxs_g2(g2, res_pargs, function="simple_exponential", one_plot=False, baseline_ = kwargs["guess_values"]["baseline"] else: baseline_ = 1.0 - pars = mod.make_params(beta=beta_, alpha=alpha_, relaxation_rate=relaxation_rate_, baseline=baseline_) + pars = mod.make_params( + beta=beta_, + alpha=alpha_, + relaxation_rate=relaxation_rate_, + baseline=baseline_, + ) else: - pars = mod.make_params(beta=0.05, alpha=1.0, relaxation_rate=0.005, baseline=1.0) + pars = mod.make_params( + beta=0.05, alpha=1.0, relaxation_rate=0.005, baseline=1.0 + ) for v in _vars: pars["%s" % v].vary = False @@ -2034,12 +2221,18 @@ def fit_gisaxs_g2(g2, res_pargs, function="simple_exponential", one_plot=False, ax.set_xlim(kwargs["xlim"]) txts = r"$\tau$" + r"$ = %.3f$" % (1 / rate[i]) + r"$ s$" - ax.text(x=0.02, y=0.55 + 0.3, s=txts, fontsize=14, transform=ax.transAxes) + ax.text( + x=0.02, y=0.55 + 0.3, s=txts, fontsize=14, transform=ax.transAxes + ) txts = r"$\alpha$" + r"$ = %.3f$" % (alpha[i]) # txts = r'$\beta$' + r'$ = %.3f$'%(beta[i]) + r'$ s^{-1}$' - ax.text(x=0.02, y=0.45 + 0.3, s=txts, fontsize=14, transform=ax.transAxes) + ax.text( + x=0.02, y=0.45 + 0.3, s=txts, fontsize=14, transform=ax.transAxes + ) txts = r"$baseline$" + r"$ = %.3f$" % (baseline[i]) - ax.text(x=0.02, y=0.35 + 0.3, s=txts, fontsize=14, transform=ax.transAxes) + ax.text( + x=0.02, y=0.35 + 0.3, s=txts, fontsize=14, transform=ax.transAxes + ) result = dict(beta=beta, rate=rate, alpha=alpha, baseline=baseline) fp = path + "uid=%s--g2-qz=%s--fit" % (uid, qz_center[qz_ind]) + ".png" @@ -2105,8 +2298,20 @@ def fit_gisaxs_g2(g2, res_pargs, function="simple_exponential", one_plot=False, # print( result1.best_values['relaxation_rate'], 
result1.best_values['beta'] ) - txts = r"$q_z$" + r"$_%s$" % qz_ind + r"$\tau$" + r"$ = %.3f$" % (1 / rate[i]) + r"$ s$" - ax.text(x=0.02, y=0.55 + 0.3 - 0.1 * qz_ind, s=txts, fontsize=14, transform=ax.transAxes) + txts = ( + r"$q_z$" + + r"$_%s$" % qz_ind + + r"$\tau$" + + r"$ = %.3f$" % (1 / rate[i]) + + r"$ s$" + ) + ax.text( + x=0.02, + y=0.55 + 0.3 - 0.1 * qz_ind, + s=txts, + fontsize=14, + transform=ax.transAxes, + ) if "ylim" in kwargs: ax.set_ylim(kwargs["ylim"]) @@ -2144,14 +2349,18 @@ def fit_gisaxs_g2(g2, res_pargs, function="simple_exponential", one_plot=False, ############################### -def get_each_box_mean_intensity(data_series, box_mask, sampling, timeperframe, plot_=True, *argv, **kwargs): +def get_each_box_mean_intensity( + data_series, box_mask, sampling, timeperframe, plot_=True, *argv, **kwargs +): """Dec 16, 2015, Y.G.@CHX get each box (ROI) mean intensity as a function of time """ - mean_int_sets, index_list = roi.mean_intensity(np.array(data_series[::sampling]), box_mask) + mean_int_sets, index_list = roi.mean_intensity( + np.array(data_series[::sampling]), box_mask + ) try: N = len(data_series) except: @@ -2166,7 +2375,13 @@ def get_each_box_mean_intensity(data_series, box_mask, sampling, timeperframe, p ax.set_title("uid= %s--Mean intensity of each box" % uid) for i in range(num_rings): - ax.plot(times[::sampling], mean_int_sets[:, i], label="Box " + str(i + 1), marker="o", ls="-") + ax.plot( + times[::sampling], + mean_int_sets[:, i], + label="Box " + str(i + 1), + marker="o", + ls="-", + ) ax.set_xlabel("Time") ax.set_ylabel("Mean Intensity") ax.legend() @@ -2250,14 +2465,22 @@ def fit_qr_qz_rate(qr, qz, rate, plot_=True, *argv, **kwargs): ax.plot(x**power, res[i].best_fit, "-r") txts = r"$D0: %.3e$" % D0[i] + r" $A^2$" + r"$s^{-1}$" dy = 0.1 - ax.text(x=0.15, y=0.65 - dy * i, s=txts, fontsize=14, transform=ax.transAxes) + ax.text( + x=0.15, y=0.65 - dy * i, s=txts, fontsize=14, transform=ax.transAxes + ) legend = 
ax.legend(loc="best") ax.set_ylabel("Relaxation rate " r"$\gamma$" "($s^{-1}$)") ax.set_xlabel("$q^%s$" r"($\AA^{-2}$)" % power) dt = datetime.now() - CurTime = "%s%02d%02d-%02d%02d-" % (dt.year, dt.month, dt.day, dt.hour, dt.minute) + CurTime = "%s%02d%02d-%02d%02d-" % ( + dt.year, + dt.month, + dt.day, + dt.hour, + dt.minute, + ) # fp = path + 'Q%s-Rate--uid=%s'%(power,uid) + CurTime + '--Fit.png' fp = path + "uid=%s--Q-Rate" % (uid) + "--fit-.png" fig.savefig(fp, dpi=fig.dpi) @@ -2503,7 +2726,9 @@ def multi_uids_gisaxs_xpcs_analysis( md["sample"] = "sample" dpix = md["x_pixel_size"] * 1000.0 # in mm, eiger 4m is 0.075 mm - lambda_ = md["incident_wavelength"] # wavelegth of the X-rays in Angstroms + lambda_ = md[ + "incident_wavelength" + ] # wavelegth of the X-rays in Angstroms Ldet = md["detector_distance"] # detector to sample distance (mm), currently, *1000 for saxs, *1 for gisaxs exposuretime = md["count_time"] @@ -2512,7 +2737,12 @@ def multi_uids_gisaxs_xpcs_analysis( # timeperframe = exposuretime#for visiblitly # timeperframe = 2 ## manual overwrite!!!! we apparently writing the wrong metadata.... 
setup_pargs = dict( - uid=uid, dpix=dpix, Ldet=Ldet, lambda_=lambda_, timeperframe=timeperframe, path=data_dir + uid=uid, + dpix=dpix, + Ldet=Ldet, + lambda_=lambda_, + timeperframe=timeperframe, + path=data_dir, ) md["avg_img"] = avg_imgr @@ -2527,17 +2757,34 @@ def multi_uids_gisaxs_xpcs_analysis( else: good_end_ = good_end FD = Multifile(filename, good_start, good_end_) - good_start = max(good_start, np.where(np.array(imgsum) > min_inten)[0][0]) - print("With compression, the good_start frame number is: %s " % good_start) + good_start = max( + good_start, np.where(np.array(imgsum) > min_inten)[0][0] + ) + print( + "With compression, the good_start frame number is: %s " + % good_start + ) print("The good_end frame number is: %s " % good_end_) if not para_run: g2, lag_steps_ = cal_g2c( - FD, box_maskr, bad_frame_list, good_start, num_buf=8, imgsum=None, norm=None + FD, + box_maskr, + bad_frame_list, + good_start, + num_buf=8, + imgsum=None, + norm=None, ) else: g2, lag_steps_ = cal_g2p( - FD, box_maskr, bad_frame_list, good_start, num_buf=8, imgsum=None, norm=None + FD, + box_maskr, + bad_frame_list, + good_start, + num_buf=8, + imgsum=None, + norm=None, ) if len(lag_steps) < len(lag_steps_): @@ -2550,7 +2797,11 @@ def multi_uids_gisaxs_xpcs_analysis( good_start = 0 good_series = apply_mask(imgsar[good_start:], maskr) imgsum, bad_frame_list = get_each_frame_intensity( - good_series, sampling=sampling, bad_pixel_threshold=1.2e8, plot_=False, uid=uid + good_series, + sampling=sampling, + bad_pixel_threshold=1.2e8, + plot_=False, + uid=uid, ) bad_image_process = False @@ -2559,14 +2810,25 @@ def multi_uids_gisaxs_xpcs_analysis( print(bad_image_process) g2, lag_steps_ = cal_g2( - good_series, box_maskr, bad_image_process, bad_frame_list, good_start, num_buf=8 + good_series, + box_maskr, + bad_image_process, + bad_frame_list, + good_start, + num_buf=8, ) if len(lag_steps) < len(lag_steps_): lag_steps = lag_step_ taus_ = lag_steps_ * timeperframe taus = lag_steps * 
timeperframe - res_pargs = dict(taus=taus_, qz_center=qz_center, qr_center=qr_center, path=data_dir_, uid=uid) + res_pargs = dict( + taus=taus_, + qz_center=qz_center, + qr_center=qr_center, + path=data_dir_, + uid=uid, + ) save_gisaxs_g2(g2, res_pargs) # plot_gisaxs_g2( g2, taus, vlim=[0.95, 1.1], res_pargs=res_pargs, one_plot=True) @@ -2576,14 +2838,33 @@ def multi_uids_gisaxs_xpcs_analysis( res_pargs, function="stretched", vlim=[0.95, 1.1], - fit_variables={"baseline": True, "beta": True, "alpha": False, "relaxation_rate": True}, - guess_values={"baseline": 1.229, "beta": 0.05, "alpha": 1.0, "relaxation_rate": 0.01}, + fit_variables={ + "baseline": True, + "beta": True, + "alpha": False, + "relaxation_rate": True, + }, + guess_values={ + "baseline": 1.229, + "beta": 0.05, + "alpha": 1.0, + "relaxation_rate": 0.01, + }, one_plot=True, ) - fit_qr_qz_rate(qr_center, qz_center, fit_result, power_variable=False, uid=uid, path=data_dir_) + fit_qr_qz_rate( + qr_center, + qz_center, + fit_result, + power_variable=False, + uid=uid, + path=data_dir_, + ) - psave_obj(md, data_dir_ + "uid=%s-md" % uid) # save the setup parameters + psave_obj( + md, data_dir_ + "uid=%s-md" % uid + ) # save the setup parameters g2s[run_seq + 1][i] = g2 diff --git a/pyCHX/v2/_commonspeckle/XPCS_SAXS.py b/pyCHX/v2/_commonspeckle/XPCS_SAXS.py index f400771..3379b6b 100644 --- a/pyCHX/v2/_commonspeckle/XPCS_SAXS.py +++ b/pyCHX/v2/_commonspeckle/XPCS_SAXS.py @@ -7,20 +7,13 @@ import os from pandas import DataFrame -from scipy.special import erf from pyCHX.v2._commonspeckle.chx_compress_analysis import ( # common Multifile, compress_eigerdata, get_avg_imgc, - get_each_ring_mean_intensityc, - init_compress_eigerdata, - mean_intensityc, - read_compressed_eigerdata, ) from pyCHX.v2._commonspeckle.chx_correlationc import ( # common - Get_Pixel_Arrayc, - auto_two_Arrayc, cal_g2c, get_pixelist_interp_iq, ) @@ -30,11 +23,7 @@ RUN_GUI, Figure, colors, - colors_, - colors_copy, markers, - markers_, - 
markers_copy, ) @@ -125,9 +114,9 @@ def recover_img_from_iq(qp, iq, center, mask): return img_ -def get_cirucular_average_std(img, mask, setup_pargs, img_name="xx"): +def get_circular_average_std(img, mask, setup_pargs, img_name="xx"): """YG. develop at CHX, 2017 July 18, - Get the standard devation of tge circular average of img + Get the standard deviation of the circular average of img image-->I(q)-->image_mean--> (image- image_mean)**2 --> I(q) --> std = sqrt(I(q)) """ qp, iq, q = get_circular_average(img, mask, pargs=setup_pargs, save=False) @@ -146,7 +135,15 @@ def get_delta_img(img, mask, setup_pargs, img_name="xx", plot=False): img_ = recover_img_from_iq(qp, iq, center, mask) delta = img - img_ * img.mean() / img_.mean() if plot: - show_img(delta, logs=True, aspect=1, cmap=cmap_albula, vmin=1e-5, vmax=10**1, image_name=img_name) + show_img( + delta, + logs=True, + aspect=1, + cmap=cmap_albula, + vmin=1e-5, + vmax=10**1, + image_name=img_name, + ) return delta @@ -181,7 +178,9 @@ def combine_ring_anglar_mask(ring_mask, ang_mask): return np.int_(ring_ang_) -def get_seg_from_ring_mask(inner_angle, outer_angle, num_angles, width_angle, center, ring_mask, qr_center): +def get_seg_from_ring_mask( + inner_angle, outer_angle, num_angles, width_angle, center, ring_mask, qr_center +): """YG. Jan 6, 2017 A simple wrap function to get angle cut mask from ring_mask Parameter: @@ -208,7 +207,9 @@ def get_seg_from_ring_mask(inner_angle, outer_angle, num_angles, width_angle, ce return seg_mask, qval_dict -def get_seg_dict_from_ring_mask(inner_angle, outer_angle, num_angles, width_angle, center, ring_mask, qr_center): +def get_seg_dict_from_ring_mask( + inner_angle, outer_angle, num_angles, width_angle, center, ring_mask, qr_center +): """YG. 
Jan 6, 2017 A simple wrap function to get angle cut mask from ring_mask Parameter: @@ -261,7 +262,11 @@ def combine_two_roi_mask(ring_mask, ang_mask, pixel_num_thres=10): for i, ind in enumerate(ruiq[1:]): ring_mask_.ravel()[np.where(rf == ind)[0]] = maxa * i - new_mask = (ring_mask_ + ang_mask) * np.array(ring_mask, dtype=bool) * np.array(ang_mask, dtype=bool) + new_mask = ( + (ring_mask_ + ang_mask) + * np.array(ring_mask, dtype=bool) + * np.array(ang_mask, dtype=bool) + ) qind, pixelist = roi.extract_label_indices(new_mask) noqs = len(np.unique(qind)) @@ -347,7 +352,14 @@ def bin_1D(x, y, nx=None, min_x=None, max_x=None): def circular_average( - image, calibrated_center, threshold=0, nx=None, pixel_size=(1, 1), min_x=None, max_x=None, mask=None + image, + calibrated_center, + threshold=0, + nx=None, + pixel_size=(1, 1), + min_x=None, + max_x=None, + mask=None, ): """Circular average of the the image data The circular average is also known as the radial integration @@ -431,7 +443,7 @@ def get_circular_average( plot_=False, save=False, *argv, - **kwargs + **kwargs, ): """get a circular average of an image Parameters @@ -449,8 +461,8 @@ def get_circular_average( number of bins in x defaults is 1500 bins - plot_: a boolen type, if True, plot the one-D curve - plot_qinpixel:a boolen type, if True, the x-axis of the one-D curve is q in pixel; else in real Q + plot_: a boolean type, if True, plot the one-D curve + plot_qinpixel:a boolean type, if True, the x-axis of the one-D curve is q in pixel; else in real Q Returns ------- @@ -461,10 +473,22 @@ def get_circular_average( """ - center, Ldet, lambda_, dpix = pargs["center"], pargs["Ldet"], pargs["lambda_"], pargs["dpix"] + center, Ldet, lambda_, dpix = ( + pargs["center"], + pargs["Ldet"], + pargs["lambda_"], + pargs["dpix"], + ) uid = pargs["uid"] qp, iq = circular_average( - avg_img, center, threshold=0, nx=nx, pixel_size=(dpix, dpix), mask=mask, min_x=min_x, max_x=max_x + avg_img, + center, + threshold=0, + nx=nx, 
+ pixel_size=(dpix, dpix), + mask=mask, + min_x=min_x, + max_x=max_x, ) qp_ = qp * dpix # convert bin_centers from r [um] to two_theta and then to q [1/px] (reciprocal space) @@ -506,12 +530,23 @@ def get_circular_average( fig.savefig(fp, dpi=fig.dpi) if save: path = pargs["path"] - save_lists([q, iq], label=["q_A-1", "Iq"], filename="%s_q_Iq.csv" % uid, path=path) + save_lists( + [q, iq], label=["q_A-1", "Iq"], filename="%s_q_Iq.csv" % uid, path=path + ) return qp, iq, q def plot_circular_average( - qp, iq, q, pargs, show_pixel=False, loglog=False, save=True, return_fig=False, *argv, **kwargs + qp, + iq, + q, + pargs, + show_pixel=False, + loglog=False, + save=True, + return_fig=False, + *argv, + **kwargs, ): if RUN_GUI: fig = Figure() @@ -560,7 +595,18 @@ def plot_circular_average( return fig -def get_angular_average(avg_img, mask, pargs, min_r, max_r, nx=3600, plot_=False, save=False, *argv, **kwargs): +def get_angular_average( + avg_img, + mask, + pargs, + min_r, + max_r, + nx=3600, + plot_=False, + save=False, + *argv, + **kwargs, +): """get a angular average of an image Parameters ---------- @@ -577,8 +623,8 @@ def get_angular_average(avg_img, mask, pargs, min_r, max_r, nx=3600, plot_=False number of bins in x defaults is 1500 bins - plot_: a boolen type, if True, plot the one-D curve - plot_qinpixel:a boolen type, if True, the x-axis of the one-D curve is q in pixel; else in real Q + plot_: a boolean type, if True, plot the one-D curve + plot_qinpixel:a boolean type, if True, the x-axis of the one-D curve is q in pixel; else in real Q Returns ------- @@ -589,11 +635,22 @@ def get_angular_average(avg_img, mask, pargs, min_r, max_r, nx=3600, plot_=False """ - center, Ldet, lambda_, dpix = pargs["center"], pargs["Ldet"], pargs["lambda_"], pargs["dpix"] + center, Ldet, lambda_, dpix = ( + pargs["center"], + pargs["Ldet"], + pargs["lambda_"], + pargs["dpix"], + ) uid = pargs["uid"] angq, ang = angular_average( - avg_img, calibrated_center=center, 
pixel_size=(dpix, dpix), nx=nx, min_r=min_r, max_r=max_r, mask=mask + avg_img, + calibrated_center=center, + pixel_size=(dpix, dpix), + nx=nx, + min_r=min_r, + max_r=max_r, + mask=mask, ) if plot_: @@ -678,7 +735,8 @@ def angular_average( min_r = 0 if max_r is None: max_r = np.sqrt( - (image.shape[0] - calibrated_center[0]) ** 2 + (image.shape[1] - calibrated_center[1]) ** 2 + (image.shape[0] - calibrated_center[0]) ** 2 + + (image.shape[1] - calibrated_center[1]) ** 2 ) r_mask = make_ring_mask(calibrated_center, image.shape, min_r, max_r) @@ -693,7 +751,9 @@ def angular_average( bina = np.ravel(angle_val) image_mask = np.ravel(image * r_mask) - bin_edges, sums, counts = utils.bin_1D(bina, image_mask, nx, min_x=min_x, max_x=max_x) + bin_edges, sums, counts = utils.bin_1D( + bina, image_mask, nx, min_x=min_x, max_x=max_x + ) # print (counts) th_mask = counts > threshold @@ -704,7 +764,18 @@ def angular_average( return bin_centers * 180 / np.pi, ang_averages -def get_t_iqc(FD, frame_edge, mask, pargs, nx=1500, plot_=False, save=False, show_progress=True, *argv, **kwargs): +def get_t_iqc( + FD, + frame_edge, + mask, + pargs, + nx=1500, + plot_=False, + save=False, + show_progress=True, + *argv, + **kwargs, +): """Get t-dependent Iq Parameters @@ -716,7 +787,7 @@ def get_t_iqc(FD, frame_edge, mask, pargs, nx=1500, plot_=False, save=False, sho nx : int, optional number of bins in x defaults is 1500 bins - plot_: a boolen type, if True, plot the time~one-D curve with qp as x-axis + plot_: a boolean type, if True, plot the time~one-D curve with qp as x-axis Returns --------- qp: q in pixel @@ -730,7 +801,9 @@ def get_t_iqc(FD, frame_edge, mask, pargs, nx=1500, plot_=False, save=False, sho for i in range(Nt): t1, t2 = frame_edge[i] # print (t1,t2) - avg_img = get_avg_imgc(FD, beg=t1, end=t2, sampling=1, plot_=False, show_progress=show_progress) + avg_img = get_avg_imgc( + FD, beg=t1, end=t2, sampling=1, plot_=False, show_progress=show_progress + ) qp, iqs[i], q = 
get_circular_average(avg_img, mask, pargs, nx=nx, plot_=False) if plot_: @@ -775,7 +848,17 @@ def get_t_iqc(FD, frame_edge, mask, pargs, nx=1500, plot_=False, save=False, sho return qp, np.array(iqs), q -def plot_t_iqc(q, iqs, frame_edge, pargs, save=True, return_fig=False, legend_size=None, *argv, **kwargs): +def plot_t_iqc( + q, + iqs, + frame_edge, + pargs, + save=True, + return_fig=False, + legend_size=None, + *argv, + **kwargs, +): """Plot t-dependent Iq Parameters @@ -853,7 +936,17 @@ def calc_q(L, a, wv): return q -def get_t_iq(data_series, frame_edge, mask, pargs, nx=1500, plot_=False, save=False, *argv, **kwargs): +def get_t_iq( + data_series, + frame_edge, + mask, + pargs, + nx=1500, + plot_=False, + save=False, + *argv, + **kwargs, +): """Get t-dependent Iq Parameters @@ -865,7 +958,7 @@ def get_t_iq(data_series, frame_edge, mask, pargs, nx=1500, plot_=False, save=Fa nx : int, optional number of bins in x defaults is 1500 bins - plot_: a boolen type, if True, plot the time~one-D curve with qp as x-axis + plot_: a boolean type, if True, plot the time~one-D curve with qp as x-axis Returns --------- @@ -929,7 +1022,7 @@ def get_t_ang( plot_=False, save=False, *argv, - **kwargs + **kwargs, ): """Get t-dependent angule intensity @@ -954,7 +1047,7 @@ def get_t_ang( nx : int, optional number of bins in x defaults is 1500 bins - plot_: a boolen type, if True, plot the time~one-D curve with qp as x-axis + plot_: a boolean type, if True, plot the time~one-D curve with qp as x-axis Returns --------- @@ -971,7 +1064,13 @@ def get_t_ang( # print (t1,t2) avg_img = get_avg_img(data_series[t1:t2], sampling=1, plot_=False) qp, iqs[i] = angular_average( - avg_img, center, pixel_size=pixel_size, nx=nx, min_r=min_r, max_r=max_r, mask=mask + avg_img, + center, + pixel_size=pixel_size, + nx=nx, + min_r=min_r, + max_r=max_r, + mask=mask, ) if plot_: @@ -1061,7 +1160,7 @@ def _make_roi(coords, edges, shape): def angulars(edges, center, shape): """ - Draw annual (angluar-shaped) 
shaped regions of interest. + Draw annual (angular-shaped) shaped regions of interest. Each ring will be labeled with an integer. Regions outside any ring will be filled with zeros. Parameters @@ -1085,7 +1184,8 @@ def angulars(edges, center, shape): edges = np.atleast_2d(np.asarray(edges)).ravel() if not 0 == len(edges) % 2: raise ValueError( - "edges should have an even number of elements, " "giving inner, outer radii for each angular" + "edges should have an even number of elements, " + "giving inner, outer radii for each angular" ) if not np.all(np.diff(edges) > 0): raise ValueError( @@ -1118,9 +1218,9 @@ def update_angular_mask_width_edge(edge, mask, center, roi_mask): return roi_mask -def fix_angle_mask_at_PN_180(edge, mask, center, roi_mask): +def fix_angle_mask_at_ON_180(edge, mask, center, roi_mask): """YG Dev@CHX May, 2019 - to fix the problem of making angluar mask at the angle edge around +/- 180 + to fix the problem of making angular mask at the angle edge around +/- 180 Input: edge: the edge of the anglues mask: the mask of the image @@ -1206,7 +1306,9 @@ def get_angular_mask( if edges is None: if num_angles != 1: - spacing = (outer_angle - inner_angle - num_angles * width) / (num_angles - 1) # spacing between rings + spacing = (outer_angle - inner_angle - num_angles * width) / ( + num_angles - 1 + ) # spacing between rings else: spacing = 0 edges = roi.ring_edges(inner_angle, width, spacing, num_angles) @@ -1220,18 +1322,18 @@ def get_angular_mask( edges2 = edges - 180 for edge_ in [edges2]: ang_mask = update_angular_mask_width_edge(edge_, mask, center, ang_mask) - ang_mask = fix_angle_mask_at_PN_180(edge_, mask, center, ang_mask) + ang_mask = fix_angle_mask_at_ON_180(edge_, mask, center, ang_mask) if flow_angle is not None: edges3 = 2 * flow_angle - edges[:, ::-1] edges4 = 2 * flow_angle - edges[:, ::-1] - 180 for edge_ in [edges3, edges4]: ang_mask = update_angular_mask_width_edge(edge_, mask, center, ang_mask) - ang_mask = 
fix_angle_mask_at_PN_180(edge_, mask, center, ang_mask) + ang_mask = fix_angle_mask_at_ON_180(edge_, mask, center, ang_mask) else: # for i, edge_ in enumerate( edges ): # print(edge_) if fix_180_angle: - ang_mask = fix_angle_mask_at_PN_180(edges, mask, center, ang_mask) + ang_mask = fix_angle_mask_at_ON_180(edges, mask, center, ang_mask) labels, indices = roi.extract_label_indices(ang_mask) nopr = np.bincount(np.array(labels, dtype=int))[1:] if len(np.where(nopr == 0)[0] != 0): @@ -1280,7 +1382,9 @@ def get_angular_mask_old( if edges is None: if num_angles != 1: - spacing = (outer_angle - inner_angle - num_angles * width) / (num_angles - 1) # spacing between rings + spacing = (outer_angle - inner_angle - num_angles * width) / ( + num_angles - 1 + ) # spacing between rings else: spacing = 0 edges = roi.ring_edges(inner_angle, width, spacing, num_angles) @@ -1374,7 +1478,12 @@ def get_ring_mask( """ - center, Ldet, lambda_, dpix = pargs["center"], pargs["Ldet"], pargs["lambda_"], pargs["dpix"] + center, Ldet, lambda_, dpix = ( + pargs["center"], + pargs["Ldet"], + pargs["lambda_"], + pargs["dpix"], + ) # spacing = (outer_radius - inner_radius)/(num_rings-1) - 2 # spacing between rings # qc = np.int_( np.linspace( inner_radius,outer_radius, num_rings ) ) @@ -1387,7 +1496,9 @@ def get_ring_mask( # find the edges of the required rings if edges is None: if num_rings != 1: - spacing = (outer_radius - inner_radius - num_rings * width) / (num_rings - 1) # spacing between rings + spacing = (outer_radius - inner_radius - num_rings * width) / ( + num_rings - 1 + ) # spacing between rings else: spacing = 0 edges = roi.ring_edges(inner_radius, width, spacing, num_rings) @@ -1538,7 +1649,15 @@ def show_ring_ang_roi(data, rois, alpha=0.3, save=False, *argv, **kwargs): def plot_qIq_with_ROI( - q, iq, q_ring_center, q_ring_edge=None, logs=True, save=False, return_fig=False, *argv, **kwargs + q, + iq, + q_ring_center, + q_ring_edge=None, + logs=True, + save=False, + return_fig=False, 
+ *argv, + **kwargs, ): """Aug 6, 2016, Y.G.@CHX Update@2019, March to make a span plot with q_ring_edge @@ -1592,12 +1711,21 @@ def plot_qIq_with_ROI( def get_each_ring_mean_intensity( - data_series, ring_mask, sampling, timeperframe, plot_=True, save=False, *argv, **kwargs + data_series, + ring_mask, + sampling, + timeperframe, + plot_=True, + save=False, + *argv, + **kwargs, ): """ get time dependent mean intensity of each ring """ - mean_int_sets, index_list = roi.mean_intensity(np.array(data_series[::sampling]), ring_mask) + mean_int_sets, index_list = roi.mean_intensity( + np.array(data_series[::sampling]), ring_mask + ) times = np.arange(len(data_series)) * timeperframe # get the time for each frame num_rings = len(np.unique(ring_mask)[1:]) @@ -1628,7 +1756,9 @@ def get_each_ring_mean_intensity( # plot g2 results -def plot_saxs_rad_ang_g2(g2, taus, res_pargs=None, master_angle_plot=False, return_fig=False, *argv, **kwargs): +def plot_saxs_rad_ang_g2( + g2, taus, res_pargs=None, master_angle_plot=False, return_fig=False, *argv, **kwargs +): """plot g2 results of segments with radius and angle partation , g2: one-time correlation function @@ -1750,7 +1880,13 @@ def plot_saxs_rad_ang_g2(g2, taus, res_pargs=None, master_angle_plot=False, retu def fit_saxs_rad_ang_g2( - g2, res_pargs=None, function="simple_exponential", fit_range=None, master_angle_plot=False, *argv, **kwargs + g2, + res_pargs=None, + function="simple_exponential", + fit_range=None, + master_angle_plot=False, + *argv, + **kwargs, ): """ Fit one-time correlation function @@ -1767,7 +1903,7 @@ def fit_saxs_rad_ang_g2( function: 'simple_exponential': fit by a simple exponential function, defined as beta * np.exp(-2 * relaxation_rate * lags) + baseline - 'streched_exponential': fit by a streched exponential function, defined as + 'stretched_exponential': fit by a stretched exponential function, defined as beta * (np.exp(-2 * relaxation_rate * lags))**alpha + baseline #fit_vibration: @@ -1775,7 
+1911,7 @@ def fit_saxs_rad_ang_g2( Returns ------- - fit resutls: + fit results: a dict, with keys as 'baseline': 'beta': @@ -1839,13 +1975,17 @@ def fit_saxs_rad_ang_g2( if function == "simple_exponential" or function == "simple": _vars = np.unique(_vars + ["alpha"]) - mod = Model(stretched_auto_corr_scat_factor) # , independent_vars= list( _vars) ) + mod = Model( + stretched_auto_corr_scat_factor + ) # , independent_vars= list( _vars) ) elif function == "stretched_exponential" or function == "stretched": mod = Model(stretched_auto_corr_scat_factor) # , independent_vars= _vars) elif function == "stretched_vibration": - mod = Model(stretched_auto_corr_scat_factor_with_vibration) # , independent_vars= _vars) + mod = Model( + stretched_auto_corr_scat_factor_with_vibration + ) # , independent_vars= _vars) elif function == "flow_para_function" or function == "flow_para": mod = Model(flow_para_function) # , independent_vars= _vars) @@ -1870,7 +2010,9 @@ def fit_saxs_rad_ang_g2( _alpha = _guess_val["alpha"] _relaxation_rate = _guess_val["relaxation_rate"] _baseline = _guess_val["baseline"] - pars = mod.make_params(beta=_beta, alpha=_alpha, relaxation_rate=_relaxation_rate, baseline=_baseline) + pars = mod.make_params( + beta=_beta, alpha=_alpha, relaxation_rate=_relaxation_rate, baseline=_baseline + ) if function == "flow_para_function" or function == "flow_para": _flow_velocity = _guess_val["flow_velocity"] @@ -1886,7 +2028,12 @@ def fit_saxs_rad_ang_g2( _freq = _guess_val["freq"] _amp = _guess_val["amp"] pars = mod.make_params( - beta=_beta, alpha=_alpha, freq=_freq, amp=_amp, relaxation_rate=_relaxation_rate, baseline=_baseline + beta=_beta, + alpha=_alpha, + freq=_freq, + amp=_amp, + relaxation_rate=_relaxation_rate, + baseline=_baseline, ) for v in _vars: @@ -1981,7 +2128,9 @@ def fit_saxs_rad_ang_g2( if function == "flow_para_function" or function == "flow_para": txts = r"$flow_v$" + r"$ = %.3f$" % (flow[i]) - ax.text(x=x, y=y0 - 0.3, s=txts, fontsize=fontsize, 
transform=ax.transAxes) + ax.text( + x=x, y=y0 - 0.3, s=txts, fontsize=fontsize, transform=ax.transAxes + ) if "ylim" in kwargs: ax.set_ylim(kwargs["ylim"]) @@ -2000,7 +2149,9 @@ def fit_saxs_rad_ang_g2( result = dict(beta=beta, rate=rate, alpha=alpha, baseline=baseline) if function == "flow_para_function" or function == "flow_para": - result = dict(beta=beta, rate=rate, alpha=alpha, baseline=baseline, flow_velocity=flow) + result = dict( + beta=beta, rate=rate, alpha=alpha, baseline=baseline, flow_velocity=flow + ) if function == "stretched_vibration": result = dict(beta=beta, rate=rate, alpha=alpha, baseline=baseline, freq=freq) @@ -2036,7 +2187,13 @@ def save_seg_saxs_g2(g2, res_pargs, time_label=True, *argv, **kwargs): if time_label: dt = datetime.now() - CurTime = "%s%02d%02d-%02d%02d-" % (dt.year, dt.month, dt.day, dt.hour, dt.minute) + CurTime = "%s%02d%02d-%02d%02d-" % ( + dt.year, + dt.month, + dt.day, + dt.hour, + dt.minute, + ) filename = os.path.join(path, "g2-%s-%s.csv" % (uid, CurTime)) else: filename = os.path.join(path, "uid=%s--g2.csv" % (uid)) @@ -2185,8 +2342,12 @@ def multi_uids_saxs_flow_xpcs_analysis( md["sample"] = "sample" dpix = md["x_pixel_size"] * 1000.0 # in mm, eiger 4m is 0.075 mm - lambda_ = md["incident_wavelength"] # wavelegth of the X-rays in Angstroms - Ldet = md["detector_distance"] * 1000 # detector to sample distance (mm) + lambda_ = md[ + "incident_wavelength" + ] # wavelegth of the X-rays in Angstroms + Ldet = ( + md["detector_distance"] * 1000 + ) # detector to sample distance (mm) exposuretime = md["count_time"] acquisition_period = md["frame_time"] timeperframe = acquisition_period # for g2 @@ -2218,8 +2379,12 @@ def multi_uids_saxs_flow_xpcs_analysis( good_end_ = good_end FD = Multifile(filename, good_start, good_end_) - good_start = max(good_start, np.where(np.array(imgsum) > min_inten)[0][0]) - print("With compression, the good_start frame number is: %s " % good_start) + good_start = max( + good_start, 
np.where(np.array(imgsum) > min_inten)[0][0] + ) + print( + "With compression, the good_start frame number is: %s " % good_start + ) print("The good_end frame number is: %s " % good_end_) norm = None @@ -2245,7 +2410,13 @@ def multi_uids_saxs_flow_xpcs_analysis( ) else: g2, lag_stepsv = cal_g2p( - FD, seg_mask, bad_frame_list, good_start, num_buf=8, imgsum=None, norm=norm + FD, + seg_mask, + bad_frame_list, + good_start, + num_buf=8, + imgsum=None, + norm=norm, ) if len(lag_steps) < len(lag_stepsv): @@ -2258,7 +2429,14 @@ def multi_uids_saxs_flow_xpcs_analysis( path=data_dir_, uid=uid + "_1a_mq%s" % conf, ) - save_g2(g2, taus=taus, qr=rcen, qz=acen, uid=uid + "_1a_mq%s" % conf, path=data_dir_) + save_g2( + g2, + taus=taus, + qr=rcen, + qz=acen, + uid=uid + "_1a_mq%s" % conf, + path=data_dir_, + ) if nconf == 0: g2s[run_seq + 1][i]["v"] = g2 # perpendular @@ -2367,7 +2545,9 @@ def multi_uids_saxs_flow_xpcs_analysis( ) dfv = save_g2_fit_para_tocsv( - g2_fit_result, filename=uid + "_1a_mq" + conf + "_fit_para", path=data_dir_ + g2_fit_result, + filename=uid + "_1a_mq" + conf + "_fit_para", + path=data_dir_, ) fit_q_rate( @@ -2379,7 +2559,9 @@ def multi_uids_saxs_flow_xpcs_analysis( ) # psave_obj( fit_result, data_dir_ + 'uid=%s-g2-fit-para'%uid ) - psave_obj(md, data_dir_ + "uid=%s-md" % uid) # save the setup parameters + psave_obj( + md, data_dir_ + "uid=%s-md" % uid + ) # save the setup parameters FD = 0 avg_img, imgsum, bad_frame_list = [0, 0, 0] @@ -2504,8 +2686,12 @@ def multi_uids_saxs_xpcs_analysis( md["sample"] = "sample" dpix = md["x_pixel_size"] * 1000.0 # in mm, eiger 4m is 0.075 mm - lambda_ = md["incident_wavelength"] # wavelegth of the X-rays in Angstroms - Ldet = md["detector_distance"] * 1000 # detector to sample distance (mm) + lambda_ = md[ + "incident_wavelength" + ] # wavelegth of the X-rays in Angstroms + Ldet = ( + md["detector_distance"] * 1000 + ) # detector to sample distance (mm) exposuretime = md["count_time"] acquisition_period = 
md["frame_time"] timeperframe = acquisition_period # for g2 @@ -2537,8 +2723,13 @@ def multi_uids_saxs_xpcs_analysis( good_end_ = good_end FD = Multifile(filename, good_start, good_end_) - good_start = max(good_start, np.where(np.array(imgsum) > min_inten)[0][0]) - print("With compression, the good_start frame number is: %s " % good_start) + good_start = max( + good_start, np.where(np.array(imgsum) > min_inten)[0][0] + ) + print( + "With compression, the good_start frame number is: %s " + % good_start + ) print("The good_end frame number is: %s " % good_end_) hmask = create_hot_pixel_mask(avg_img, 1e8) @@ -2556,11 +2747,23 @@ def multi_uids_saxs_xpcs_analysis( norm = get_pixelist_interp_iq(qp, iq, ring_mask, center) if not para_run: g2, lag_steps_ = cal_g2c( - FD, ring_mask, bad_frame_list, good_start, num_buf=8, imgsum=None, norm=norm + FD, + ring_mask, + bad_frame_list, + good_start, + num_buf=8, + imgsum=None, + norm=norm, ) else: g2, lag_steps_ = cal_g2p( - FD, ring_mask, bad_frame_list, good_start, num_buf=8, imgsum=None, norm=norm + FD, + ring_mask, + bad_frame_list, + good_start, + num_buf=8, + imgsum=None, + norm=norm, ) if len(lag_steps) < len(lag_steps_): @@ -2580,7 +2783,11 @@ def multi_uids_saxs_xpcs_analysis( good_series = apply_mask(imgsa[good_start:], mask) imgsum, bad_frame_list = get_each_frame_intensity( - good_series, sampling=sampling, bad_pixel_threshold=1.2e8, plot_=False, uid=uid + good_series, + sampling=sampling, + bad_pixel_threshold=1.2e8, + plot_=False, + uid=uid, ) bad_image_process = False @@ -2589,7 +2796,12 @@ def multi_uids_saxs_xpcs_analysis( print(bad_image_process) g2, lag_steps_ = cal_g2( - good_series, ring_mask, bad_image_process, bad_frame_list, good_start, num_buf=8 + good_series, + ring_mask, + bad_image_process, + bad_frame_list, + good_start, + num_buf=8, ) if len(lag_steps) < len(lag_steps_): lag_steps = lag_step_ @@ -2597,7 +2809,9 @@ def multi_uids_saxs_xpcs_analysis( taus_ = lag_steps_ * timeperframe taus = lag_steps 
* timeperframe - res_pargs = dict(taus=taus_, q_ring_center=q_ring_center, path=data_dir_, uid=uid) + res_pargs = dict( + taus=taus_, q_ring_center=q_ring_center, path=data_dir_, uid=uid + ) save_saxs_g2(g2, res_pargs) # plot_saxs_g2( g2, taus, vlim=[0.95, 1.05], res_pargs=res_pargs) if fit: @@ -2606,15 +2820,31 @@ def multi_uids_saxs_xpcs_analysis( res_pargs, function="stretched", vlim=[0.95, 1.05], - fit_variables={"baseline": True, "beta": True, "alpha": False, "relaxation_rate": True}, - guess_values={"baseline": 1.0, "beta": 0.05, "alpha": 1.0, "relaxation_rate": 0.01}, + fit_variables={ + "baseline": True, + "beta": True, + "alpha": False, + "relaxation_rate": True, + }, + guess_values={ + "baseline": 1.0, + "beta": 0.05, + "alpha": 1.0, + "relaxation_rate": 0.01, + }, ) fit_q_rate( - q_ring_center[:], fit_result["rate"][:], power_variable=False, uid=uid, path=data_dir_ + q_ring_center[:], + fit_result["rate"][:], + power_variable=False, + uid=uid, + path=data_dir_, ) psave_obj(fit_result, data_dir_ + "uid=%s-g2-fit-para" % uid) - psave_obj(md, data_dir_ + "uid=%s-md" % uid) # save the setup parameters + psave_obj( + md, data_dir_ + "uid=%s-md" % uid + ) # save the setup parameters g2s[run_seq + 1][i] = g2 print("*" * 40) @@ -2626,8 +2856,8 @@ def multi_uids_saxs_xpcs_analysis( def plot_mul_g2(g2s, md): """ Plot multi g2 functions generated by multi_uids_saxs_xpcs_analysis - Will create a large plot with q_number pannels - Each pannel (for each q) will show a number (run number of g2 functions + Will create a large plot with q_number panels + Each panel (for each q) will show a number (run number of g2 functions """ q_ring_center = md["q_ring_center"] @@ -2675,7 +2905,12 @@ def plot_mul_g2(g2s, md): # markersize=6, label = '%s'%sid) ax.semilogx( - taus[1:len_], y[1:len_], marker=markers[i], color=colors[i], markersize=6, label="%s" % sid + taus[1:len_], + y[1:len_], + marker=markers[i], + color=colors[i], + markersize=6, + label="%s" % sid, ) if sn == 0: @@ 
-2690,13 +2925,15 @@ def get_QrQw_From_RoiMask(roi_mask, setup_pargs): Input: roi_mask: int-type array, 2D roi mask, with q-index starting from 1 setup_pargs: dict, at least with keys as - dpix (det pixel size),lamdba_( wavelength), center( beam center) + dpix (det pixel size),lambda_( wavelength), center( beam center) Output: qr_cen: the q center of each ring qr_wid: the q width of each ring """ - qp_roi, iq_roi, q_roi = get_circular_average(roi_mask, np.array(roi_mask, dtype=bool), pargs=setup_pargs) + qp_roi, iq_roi, q_roi = get_circular_average( + roi_mask, np.array(roi_mask, dtype=bool), pargs=setup_pargs + ) Nmax = roi_mask.max() qr_cen = np.zeros(Nmax) qr_wid = np.zeros(Nmax) diff --git a/pyCHX/v2/_commonspeckle/XPCS_XSVS_SAXS_Multi_2017_V4.py b/pyCHX/v2/_commonspeckle/XPCS_XSVS_SAXS_Multi_2017_V4.py index aa327ae..b232024 100644 --- a/pyCHX/v2/_commonspeckle/XPCS_XSVS_SAXS_Multi_2017_V4.py +++ b/pyCHX/v2/_commonspeckle/XPCS_XSVS_SAXS_Multi_2017_V4.py @@ -2,9 +2,6 @@ # from pyCHX.chx_packages import * #common -from pyCHX.v2._commonspeckle.chx_xpcs_xsvs_jupyter_V1 import ( # common #common added "V1" to import ok - run_xpcs_xsvs_single, -) def XPCS_XSVS_SAXS_Multi( @@ -52,7 +49,9 @@ def XPCS_XSVS_SAXS_Multi( mask_load = mask.copy() username = getpass.getuser() - data_dir0 = os.path.join("/XF11ID/analysis/", run_pargs["CYCLE"], username, "Results/") + data_dir0 = os.path.join( + "/XF11ID/analysis/", run_pargs["CYCLE"], username, "Results/" + ) os.makedirs(data_dir0, exist_ok=True) print("Results from this analysis will be stashed in the directory %s" % data_dir0) data_dir = os.path.join(data_dir0, uid_average + "/") @@ -94,7 +93,9 @@ def XPCS_XSVS_SAXS_Multi( wat = get_averaged_data_from_multi_res(multi_res, keystr="wat") if run_t_ROI_Inten: times_roi = get_averaged_data_from_multi_res(multi_res, keystr="times_roi") - mean_int_sets = get_averaged_data_from_multi_res(multi_res, keystr="mean_int_sets") + mean_int_sets = get_averaged_data_from_multi_res( + 
multi_res, keystr="mean_int_sets" + ) if run_one_time: g2 = get_averaged_data_from_multi_res(multi_res, keystr="g2") @@ -126,10 +127,14 @@ def XPCS_XSVS_SAXS_Multi( "relaxation_rate": 0.01, }, ) - g2_fit_paras = save_g2_fit_para_tocsv(g2_fit_result, filename=uid + "_g2_fit_paras.csv", path=data_dir) + g2_fit_paras = save_g2_fit_para_tocsv( + g2_fit_result, filename=uid + "_g2_fit_paras.csv", path=data_dir + ) if run_two_time: - g12b = get_averaged_data_from_multi_res(multi_res, keystr="g12b", different_length=True) + g12b = get_averaged_data_from_multi_res( + multi_res, keystr="g12b", different_length=True + ) g2b = get_averaged_data_from_multi_res(multi_res, keystr="g2b") tausb = get_averaged_data_from_multi_res(multi_res, keystr="tausb") @@ -162,7 +167,9 @@ def XPCS_XSVS_SAXS_Multi( }, ) - g2b_fit_paras = save_g2_fit_para_tocsv(g2_fit_resultb, filename=uid + "_g2b_fit_paras.csv", path=data_dir) + g2b_fit_paras = save_g2_fit_para_tocsv( + g2_fit_resultb, filename=uid + "_g2b_fit_paras.csv", path=data_dir + ) if run_four_time: g4 = get_averaged_data_from_multi_res(multi_res, keystr="g4") @@ -181,7 +188,9 @@ def XPCS_XSVS_SAXS_Multi( contrast_factorL = get_averaged_data_from_multi_res( multi_res, keystr="contrast_factorL", different_length=False ) - times_xsvs = get_averaged_data_from_multi_res(multi_res, keystr="times_xsvs", different_length=False) + times_xsvs = get_averaged_data_from_multi_res( + multi_res, keystr="times_xsvs", different_length=False + ) cont_pds = save_arrays( contrast_factorL, label=times_xsvs, @@ -190,9 +199,15 @@ def XPCS_XSVS_SAXS_Multi( return_res=True, ) if False: - spec_kmean = get_averaged_data_from_multi_res(multi_res, keystr="spec_kmean") - spec_pds = get_averaged_data_from_multi_res(multi_res, keystr="spec_pds", different_length=False) - times_xsvs = get_averaged_data_from_multi_res(multi_res, keystr="times_xsvs", different_length=False) + spec_kmean = get_averaged_data_from_multi_res( + multi_res, keystr="spec_kmean" + ) + spec_pds = 
get_averaged_data_from_multi_res( + multi_res, keystr="spec_pds", different_length=False + ) + times_xsvs = get_averaged_data_from_multi_res( + multi_res, keystr="times_xsvs", different_length=False + ) spec_his, spec_std = get_his_std_from_pds(spec_pds, his_shapes=None) ML_val, KL_val, K_ = get_xsvs_fit( spec_his, @@ -331,7 +346,9 @@ def XPCS_XSVS_SAXS_Multi( beg=good_start, ) if run_t_ROI_Inten: - plot_each_ring_mean_intensityc(times_roi, mean_int_sets, uid=uid, save=True, path=data_dir) + plot_each_ring_mean_intensityc( + times_roi, mean_int_sets, uid=uid, save=True, path=data_dir + ) if run_one_time: plot_g2_general( @@ -506,7 +523,9 @@ def XPCS_XSVS_SAXS_Multi( for k, v in zip(["taus", "g2", "g2_fit_paras"], [taus, g2, g2_fit_paras]): Exdt[k] = v if run_two_time: - for k, v in zip(["tausb", "g2b", "g2b_fit_paras", "g12b"], [tausb, g2b, g2b_fit_paras, g12b]): + for k, v in zip( + ["tausb", "g2b", "g2b_fit_paras", "g12b"], [tausb, g2b, g2b_fit_paras, g12b] + ): Exdt[k] = v if run_four_time: for k, v in zip(["taus4", "g4"], [taus4, g4]): @@ -658,6 +677,8 @@ def XPCS_XSVS_SAXS_Multi( suf_ids[1][i * step : (i + 1) * step], suf_ids[2][i * step : (i + 1) * step], ) - XPCS_XSVS_SAXS_Multi(0, 0, run_pargs=run_pargs, suf_ids=suf_idsi, uid_average=uid_averages[i]) + XPCS_XSVS_SAXS_Multi( + 0, 0, run_pargs=run_pargs, suf_ids=suf_idsi, uid_average=uid_averages[i] + ) run_time(t0) diff --git a/pyCHX/v2/_commonspeckle/chx_Fitters2D.py b/pyCHX/v2/_commonspeckle/chx_Fitters2D.py index 852502e..8be7062 100644 --- a/pyCHX/v2/_commonspeckle/chx_Fitters2D.py +++ b/pyCHX/v2/_commonspeckle/chx_Fitters2D.py @@ -12,7 +12,9 @@ def gauss_func(x, xc, amp, sigma, baseline): def gauss2D_func(x, y, xc, amp, sigmax, yc, sigmay, baseline): return ( - amp * np.exp(-((x - xc) ** 2) / 2.0 / sigmax**2) * np.exp(-((y - yc) ** 2) / 2.0 / sigmay**2) + amp + * np.exp(-((x - xc) ** 2) / 2.0 / sigmax**2) + * np.exp(-((y - yc) ** 2) / 2.0 / sigmay**2) + baseline ) @@ -75,13 +77,15 @@ def __call__(self, 
x, y, vx, vy, **kwargs): # make the parameters from the kwargs for key in self.params.keys(): - if key in kwargs.keys() and key is not "XY": + if key in kwargs.keys() and key != "XY": params[key].value = kwargs[key] else: # then guess params[key].value = guesskeys[key] - self.mod = Model(self.fitfunc, independent_vars=["x", "y"], param_names=self.params.keys()) + self.mod = Model( + self.fitfunc, independent_vars=["x", "y"], param_names=self.params.keys() + ) # assumes first var is dependent var, and save last params V = np.array([vx, vy]) self._res = self.mod.fit(V, x=x, y=y, params=params) @@ -142,7 +146,7 @@ def guess(self, **kwargs): if kwargs is not None: for key in kwargs.keys(): - if key in paramsdict and key is not "xy": + if key in paramsdict and key != "xy": paramsdict[key] = kwargs[key] return paramsdict @@ -189,15 +193,19 @@ def __call__(self, XY, img, **kwargs): # make the parameters from the kwargs for key in self.params.keys(): - if key in kwargs.keys() and key is not "XY": + if key in kwargs.keys() and key != "XY": params[key].value = kwargs[key] else: # then guess params[key].value = guesskeys[key] - self.mod = Model(self.fitfunc, independent_vars=["XY"], param_names=self.params.keys()) + self.mod = Model( + self.fitfunc, independent_vars=["XY"], param_names=self.params.keys() + ) # assumes first var is dependent var - res = self.mod.fit(img.ravel(), XY=(XY[0].ravel(), XY[1].ravel()), params=params, **kwargs) + res = self.mod.fit( + img.ravel(), XY=(XY[0].ravel(), XY[1].ravel()), params=params, **kwargs + ) ## old version, only return values # add reduced chisq to parameter list # res.best_values['chisq']=res.redchi @@ -269,7 +277,9 @@ def __call__(self, img, x=None, y=None, **kwargs): self.params["amp"].min = 0 return super(Gauss2DFitter, self).__call__(XY, img, **kwargs) - def fitfunc(self, XY, xc=None, yc=None, amp=1.0, baseline=0.0, sigmax=1.0, sigmay=1.0): + def fitfunc( + self, XY, xc=None, yc=None, amp=1.0, baseline=0.0, sigmax=1.0, 
sigmay=1.0 + ): """ xy : 2 by N by N matrix containing x and y xy[0] : x @@ -287,7 +297,9 @@ def fitfunc(self, XY, xc=None, yc=None, amp=1.0, baseline=0.0, sigmax=1.0, sigma yc = X.shape[0] // 2 return ( - amp * np.exp(-((X - xc) ** 2) / 2.0 / sigmax**2) * np.exp(-((Y - yc) ** 2) / 2.0 / sigmay**2) + amp + * np.exp(-((X - xc) ** 2) / 2.0 / sigmax**2) + * np.exp(-((Y - yc) ** 2) / 2.0 / sigmay**2) + baseline ) @@ -334,7 +346,7 @@ def guess(self, img, XY=None, **kwargs): paramsdict["sigmay"] = 1 # print( paramsdict ) for key in kwargs.keys(): - if key in paramsdict and key is not "xy": + if key in paramsdict and key != "xy": paramsdict[key] = kwargs[key] # print( paramsdict ) return paramsdict diff --git a/pyCHX/v2/_commonspeckle/chx_compress.py b/pyCHX/v2/_commonspeckle/chx_compress.py index f6c1bf3..e2f4863 100644 --- a/pyCHX/v2/_commonspeckle/chx_compress.py +++ b/pyCHX/v2/_commonspeckle/chx_compress.py @@ -1,11 +1,8 @@ -import gc import os import pickle as pkl import shutil import struct import sys -from contextlib import closing -from glob import iglob from multiprocessing import Pool import dill @@ -26,7 +23,14 @@ # from pyCHX.v2._commonspeckle.chx_libs import (np, roi, time, datetime, os, getpass, db, # LogNorm, RUN_GUI) #common -from pyCHX.v2._commonspeckle.chx_libs import RUN_GUI, LogNorm, datetime, getpass, np, os, roi, time # common +from pyCHX.v2._commonspeckle.chx_libs import ( + RUN_GUI, + LogNorm, + np, + os, + roi, + time, +) # common # imports handler from CHX # this is where the decision is made whether or not to use dask @@ -40,7 +44,9 @@ def run_dill_encoded(what): def apply_async(pool, fun, args, callback=None): - return pool.apply_async(run_dill_encoded, (dill.dumps((fun, args)),), callback=callback) + return pool.apply_async( + run_dill_encoded, (dill.dumps((fun, args)),), callback=callback + ) def map_async(pool, fun, args): @@ -199,7 +205,9 @@ def compress_eigerdata( images_per_file=images_per_file, ) else: - print("Using already created 
compressed file with filename as :%s." % filename) + print( + "Using already created compressed file with filename as :%s." % filename + ) beg = 0 return read_compressed_eigerdata( mask, @@ -246,7 +254,9 @@ def read_compressed_eigerdata( CAL = True else: try: - mask, avg_img, imgsum, bad_frame_list_ = pkl.load(open(filename + ".pkl", "rb")) + mask, avg_img, imgsum, bad_frame_list_ = pkl.load( + open(filename + ".pkl", "rb") + ) except: CAL = True if CAL: @@ -311,7 +321,9 @@ def para_compress_eigerdata( if not copy_rawdata: images_ = EigerImages(data_path, images_per_file, md) else: - print("Due to a IO problem running on GPFS. The raw data will be copied to /tmp_data/Data.") + print( + "Due to a IO problem running on GPFS. The raw data will be copied to /tmp_data/Data." + ) print("Copying...") copy_data(data_path, new_path) # print(data_path, new_path) @@ -331,11 +343,17 @@ def para_compress_eigerdata( N = int(np.ceil(N / bins)) Nf = int(np.ceil(N / num_sub)) if Nf > cpu_core_number: - print("The process number is larger than %s (XF11ID server core number)" % cpu_core_number) + print( + "The process number is larger than %s (XF11ID server core number)" + % cpu_core_number + ) num_sub_old = num_sub num_sub = int(np.ceil(N / cpu_core_number)) Nf = int(np.ceil(N / num_sub)) - print("The sub compressed file number was changed from %s to %s" % (num_sub_old, num_sub)) + print( + "The sub compressed file number was changed from %s to %s" + % (num_sub_old, num_sub) + ) create_compress_header(md, filename + "-header", nobytes, bins, rot90=rot90) # print( 'done for header here') # print(data_path_, images_per_file) @@ -383,7 +401,7 @@ def para_compress_eigerdata( print("Bad frame list are: %s" % bad_frame_list) else: print("No bad frames are involved.") - print("Combining the seperated compressed files together...") + print("Combining the separated compressed files together...") combine_compressed(filename, Nf, del_old=True) del results del res_ @@ -396,7 +414,9 @@ def 
para_compress_eigerdata( def combine_compressed(filename, Nf, del_old=True): old_files = np.concatenate( - np.array([[filename + "-header"], [filename + "_temp-%i.tmp" % i for i in range(Nf)]]) + np.array( + [[filename + "-header"], [filename + "_temp-%i.tmp" % i for i in range(Nf)]] + ) ) combine_binary_files(filename, old_files, del_old) @@ -454,15 +474,22 @@ def para_segment_compress_eigerdata( num_sub *= bins if N % num_sub: Nf = N // num_sub + 1 - print("The average image intensity would be slightly not correct, about 1% error.") - print("Please give a num_sub to make reminder of Num_images/num_sub =0 to get a correct avg_image") + print( + "The average image intensity would be slightly not correct, about 1% error." + ) + print( + "Please give a num_sub to make reminder of Num_images/num_sub =0 to get a correct avg_image" + ) else: Nf = N // num_sub print("It will create %i temporary files for parallel compression." % Nf) if Nf > num_max_para_process: N_runs = np.int(np.ceil(Nf / float(num_max_para_process))) - print("The parallel run number: %s is larger than num_max_para_process: %s" % (Nf, num_max_para_process)) + print( + "The parallel run number: %s is larger than num_max_para_process: %s" + % (Nf, num_max_para_process) + ) else: N_runs = 1 result = {} @@ -538,7 +565,9 @@ def segment_compress_eigerdata( else: images = EigerImages(data_path, images_per_file, md)[N1:N2] if reverse: - images = reverse_updown(EigerImages(data_path, images_per_file, md))[N1:N2] + images = reverse_updown(EigerImages(data_path, images_per_file, md))[ + N1:N2 + ] if rot90: images = rot90_clockwise(images) @@ -580,7 +609,11 @@ def segment_compress_eigerdata( v = np.ravel(np.array(img, dtype=dtype))[p] dlen = len(p) imgsum[n] = v.sum() - if (dlen == 0) or (imgsum[n] > bad_pixel_threshold) or (imgsum[n] <= bad_pixel_low_threshold): + if ( + (dlen == 0) + or (imgsum[n] > bad_pixel_threshold) + or (imgsum[n] <= bad_pixel_low_threshold) + ): dlen = 0 fp.write(struct.pack("@I", dlen)) 
else: @@ -591,12 +624,16 @@ def segment_compress_eigerdata( if bins == 1: fp.write(struct.pack("@{}{}".format(dlen, "ih"[nobytes == 2]), *v)) else: - fp.write(struct.pack("@{}{}".format(dlen, "dd"[nobytes == 2]), *v)) # n +=1 + fp.write( + struct.pack("@{}{}".format(dlen, "dd"[nobytes == 2]), *v) + ) # n +=1 del p, v, img fp.flush() fp.close() avg_img /= good_count - bad_frame_list = (np.array(imgsum) > bad_pixel_threshold) | (np.array(imgsum) <= bad_pixel_low_threshold) + bad_frame_list = (np.array(imgsum) > bad_pixel_threshold) | ( + np.array(imgsum) <= bad_pixel_low_threshold + ) sys.stdout.write("#") sys.stdout.flush() # del images, mask, avg_img, imgsum, bad_frame_list @@ -847,7 +884,8 @@ def init_compress_eigerdata( avg_img /= good_count bad_frame_list = np.where( - (np.array(imgsum) > bad_pixel_threshold) | (np.array(imgsum) <= bad_pixel_low_threshold) + (np.array(imgsum) > bad_pixel_threshold) + | (np.array(imgsum) <= bad_pixel_low_threshold) )[0] # bad_frame_list1 = np.where( np.array(imgsum) > bad_pixel_threshold )[0] # bad_frame_list2 = np.where( np.array(imgsum) < bad_pixel_low_threshold )[0] @@ -1049,7 +1087,9 @@ def __init__(self, FD, bins=100): self.FD = FD if (FD.end - FD.beg) % bins: - print("Please give a better bins number and make the length of FD/bins= integer") + print( + "Please give a better bins number and make the length of FD/bins= integer" + ) else: self.bins = bins self.md = FD.md @@ -1058,7 +1098,12 @@ def __init__(self, FD, bins=100): Nimg = FD.end - FD.beg slice_num = Nimg // bins self.end = slice_num - self.time_edge = np.array(create_time_slice(N=Nimg, slice_num=slice_num, slice_width=bins)) + FD.beg + self.time_edge = ( + np.array( + create_time_slice(N=Nimg, slice_num=slice_num, slice_width=bins) + ) + + FD.beg + ) self.get_bin_frame() def get_bin_frame(self): @@ -1068,7 +1113,9 @@ def get_bin_frame(self): # print (n) t1, t2 = self.time_edge[n] # print( t1, t2) - self.frames[:, :, n] = get_avg_imgc(FD, beg=t1, end=t2, sampling=1, 
plot_=False, show_progress=False) + self.frames[:, :, n] = get_avg_imgc( + FD, beg=t1, end=t2, sampling=1, plot_=False, show_progress=False + ) def rdframe(self, n): return self.frames[:, :, n] @@ -1095,7 +1142,7 @@ def __init__(self, filename, mode="rb"): if mode == "wb": raise ValueError("Write mode 'wb' not supported yet") if mode != "rb" and mode != "wb": - raise ValueError("Error, mode must be 'rb' or 'wb'" "got : {}".format(mode)) + raise ValueError("Error, mode must be 'rb' or 'wb'got : {}".format(mode)) self._filename = filename self._mode = mode # open the file descriptor @@ -1182,7 +1229,9 @@ def _read_raw(self, n): Reads from current cursor in file. """ if n > self.Nframes: - raise KeyError("Error, only {} frames, asked for {}".format(self.Nframes, n)) + raise KeyError( + "Error, only {} frames, asked for {}".format(self.Nframes, n) + ) # dlen is 4 bytes cur = self.frame_indexes[n] dlen = np.frombuffer(self._fd[cur : cur + 4], dtype=" bad_pixel_threshold) | (np.array(imgsum) <= bad_pixel_low_threshold))[0] + np.where( + (np.array(imgsum) > bad_pixel_threshold) + | (np.array(imgsum) <= bad_pixel_low_threshold) + )[0] + FD.beg ) diff --git a/pyCHX/v2/_commonspeckle/chx_compress_analysis.py b/pyCHX/v2/_commonspeckle/chx_compress_analysis.py index f1cc54d..f7ee808 100644 --- a/pyCHX/v2/_commonspeckle/chx_compress_analysis.py +++ b/pyCHX/v2/_commonspeckle/chx_compress_analysis.py @@ -1,13 +1,8 @@ from __future__ import absolute_import, division, print_function import logging -import os -import struct -from collections import namedtuple import matplotlib.pyplot as plt -from skbeam.core.roi import extract_label_indices -from skbeam.core.utils import multi_tau_lags from tqdm import tqdm from pyCHX.v2._commonspeckle.chx_generic_functions import save_arrays # common @@ -17,30 +12,17 @@ from pyCHX.v2._commonspeckle.chx_libs import ( # common db rm; common RUN_GUI, Figure, - LogNorm, colors, - colors_, - datetime, - getpass, markers, - markers_, np, - os, roi, - 
time, ) logger = logging.getLogger(__name__) from pyCHX.v2._commonspeckle.chx_compress import ( # common - Multifile, - compress_eigerdata, get_avg_imgc, - get_each_frame_intensityc, - init_compress_eigerdata, mean_intensityc, - pass_FD, - read_compressed_eigerdata, ) from pyCHX.v2._commonspeckle.chx_generic_functions import find_bad_pixels_FD # common @@ -48,7 +30,9 @@ # from pyCHX.chx_compress import * -def get_time_edge_avg_img(FD, frame_edge, show_progress=True, apply_threshold=False, threshold=15): +def get_time_edge_avg_img( + FD, frame_edge, show_progress=True, apply_threshold=False, threshold=15 +): """YG Dev Nov 14, 2017@CHX Update@2019/6/12 with option of apply a threshold for each frame Get averaged img by giving FD and frame edges @@ -74,7 +58,9 @@ def get_time_edge_avg_img(FD, frame_edge, show_progress=True, apply_threshold=Fa for i in tqdm(range(Nt)): t1, t2 = frame_edge[i] if not apply_threshold: - d[i] = get_avg_imgc(FD, beg=t1, end=t2, sampling=1, plot_=False, show_progress=show_progress) + d[i] = get_avg_imgc( + FD, beg=t1, end=t2, sampling=1, plot_=False, show_progress=show_progress + ) else: dti = np.zeros([t2 - t1, avg_imgi.shape[0], avg_imgi.shape[1]]) j = 0 @@ -171,7 +157,9 @@ def cal_waterfallc( norm = np.bincount(qind)[1:] n = 0 # for i in tqdm(range( FD.beg , FD.end )): - for i in tqdm(range(FD.beg, FD.end, sampling), desc="Get waterfall for q index=%s" % qindex): + for i in tqdm( + range(FD.beg, FD.end, sampling), desc="Get waterfall for q index=%s" % qindex + ): (p, v) = FD.rdrawframe(i) w = np.where(timg[p])[0] pxlist = timg[p[w]] - 1 @@ -183,7 +171,9 @@ def cal_waterfallc( watf_ = watf.copy() watf = np.zeros([watf_.shape[0], waterfall_roi_size[0]]) for i in range(waterfall_roi_size[1]): - watf += watf_[:, waterfall_roi_size[0] * i : waterfall_roi_size[0] * (i + 1)] + watf += watf_[ + :, waterfall_roi_size[0] * i : waterfall_roi_size[0] * (i + 1) + ] watf /= waterfall_roi_size[0] if save: @@ -244,7 +234,9 @@ def plot_waterfallc( vmin = 
wat.min() if aspect is None: aspect = wat.shape[0] / wat.shape[1] - im = imshow(ax, wat.T, cmap=cmap, vmax=vmax, extent=extent, interpolation=interpolation) + im = imshow( + ax, wat.T, cmap=cmap, vmax=vmax, extent=extent, interpolation=interpolation + ) # im = ax.imshow(wat.T, cmap='viridis', vmax=vmax,extent= extent,interpolation = interpolation ) fig.colorbar(im) ax.set_aspect(aspect) @@ -263,7 +255,9 @@ def plot_waterfallc( return fig, ax, im -def get_waterfallc(FD, labeled_array, qindex=1, aspect=1.0, vmax=None, save=False, *argv, **kwargs): +def get_waterfallc( + FD, labeled_array, qindex=1, aspect=1.0, vmax=None, save=False, *argv, **kwargs +): """plot waterfall for a giving compressed file FD: class object, the compressed file handler @@ -302,12 +296,16 @@ def get_waterfallc(FD, labeled_array, qindex=1, aspect=1.0, vmax=None, save=Fals return wat -def cal_each_ring_mean_intensityc(FD, ring_mask, sampling=1, timeperframe=None, multi_cor=False, *argv, **kwargs): +def cal_each_ring_mean_intensityc( + FD, ring_mask, sampling=1, timeperframe=None, multi_cor=False, *argv, **kwargs +): """ get time dependent mean intensity of each ring """ - mean_int_sets, index_list = mean_intensityc(FD, ring_mask, sampling, index=None, multi_cor=multi_cor) + mean_int_sets, index_list = mean_intensityc( + FD, ring_mask, sampling, index=None, multi_cor=multi_cor + ) if timeperframe is None: times = np.arange(FD.end - FD.beg) + FD.beg # get the time for each frame else: @@ -316,7 +314,9 @@ def cal_each_ring_mean_intensityc(FD, ring_mask, sampling=1, timeperframe=None, return times, mean_int_sets -def plot_each_ring_mean_intensityc(times, mean_int_sets, xlabel="Frame", save=False, *argv, **kwargs): +def plot_each_ring_mean_intensityc( + times, mean_int_sets, xlabel="Frame", save=False, *argv, **kwargs +): """ Plot time dependent mean intensity of each ring """ diff --git a/pyCHX/v2/_commonspeckle/chx_correlation.py b/pyCHX/v2/_commonspeckle/chx_correlation.py index 2ef23d2..37d1dc2 
100644 --- a/pyCHX/v2/_commonspeckle/chx_correlation.py +++ b/pyCHX/v2/_commonspeckle/chx_correlation.py @@ -39,6 +39,7 @@ """ This module is for functions specific to time correlation """ + from __future__ import absolute_import, division, print_function from collections import namedtuple @@ -272,7 +273,7 @@ def lazy_one_time(image_iterable, num_levels, num_bufs, labels, internal_state=N ------ namedtuple A `results` object is yielded after every image has been processed. - This `reults` object contains, in this order: + This `results` object contains, in this order: - `g2`: the normalized correlation shape is (len(lag_steps), num_rois) - `lag_steps`: the times at which the correlation was computed @@ -394,7 +395,7 @@ def multi_tau_auto_corr(num_levels, num_bufs, labels, images): author: Mark Sutton For parameter description, please reference the docstring for lazy_one_time. Note that there is an API difference between this function - and `lazy_one_time`. The `images` arugment is at the end of this function + and `lazy_one_time`. The `images` argument is at the end of this function signature here for backwards compatibility, but is the first argument in the `lazy_one_time()` function. The semantics of the variables remain unchanged. @@ -463,7 +464,9 @@ def two_time_corr(labels, images, num_frames, num_bufs, num_levels=1): return two_time_state_to_results(result) -def lazy_two_time(labels, images, num_frames, num_bufs, num_levels=1, two_time_internal_state=None): +def lazy_two_time( + labels, images, num_frames, num_bufs, num_levels=1, two_time_internal_state=None +): """Generator implementation of two-time correlation If you do not want multi-tau correlation, set num_levels to 1 and num_bufs to the number of images you wish to correlate @@ -495,7 +498,7 @@ def lazy_two_time(labels, images, num_frames, num_bufs, num_levels=1, two_time_i ------ namedtuple A ``results`` object is yielded after every image has been processed. 
- This `reults` object contains, in this order: + This `results` object contains, in this order: - ``g2``: the normalized correlation shape is (num_rois, len(lag_steps), len(lag_steps)) - ``lag_steps``: the times at which the correlation was computed @@ -523,7 +526,9 @@ def lazy_two_time(labels, images, num_frames, num_bufs, num_levels=1, two_time_i 010401(1-4), 2007. """ if two_time_internal_state is None: - two_time_internal_state = _init_state_two_time(num_levels, num_bufs, labels, num_frames) + two_time_internal_state = _init_state_two_time( + num_levels, num_bufs, labels, num_frames + ) # create a shorthand reference to the results and state named tuple s = two_time_internal_state @@ -579,7 +584,10 @@ def lazy_two_time(labels, images, num_frames, num_bufs, num_levels=1, two_time_i t1_idx = (s.count_level[level] - 1) * 2 - current_img_time = ((s.time_ind[level - 1])[t1_idx] + (s.time_ind[level - 1])[t1_idx + 1]) / 2.0 + current_img_time = ( + (s.time_ind[level - 1])[t1_idx] + + (s.time_ind[level - 1])[t1_idx + 1] + ) / 2.0 # time frame for each level s.time_ind[level].append(current_img_time) @@ -699,7 +707,9 @@ def _two_time_process( if not isinstance(current_img_time, int): nshift = 2 ** (level - 1) for i in range(-nshift + 1, nshift + 1): - g2[:, int(tind1 + i), int(tind2 + i)] = (tmp_binned / (pi_binned * fi_binned)) * num_pixels + g2[:, int(tind1 + i), int(tind2 + i)] = ( + tmp_binned / (pi_binned * fi_binned) + ) * num_pixels else: g2[:, tind1, tind2] = tmp_binned / (pi_binned * fi_binned) * num_pixels @@ -805,7 +815,9 @@ def _validate_and_transform_inputs(num_bufs, num_levels, labels): length of each levels """ if num_bufs % 2 != 0: - raise ValueError("There must be an even number of `num_bufs`. You " "provided %s" % num_bufs) + raise ValueError( + "There must be an even number of `num_bufs`. 
You provided %s" % num_bufs + ) label_array, pixel_list = extract_label_indices(labels) # map the indices onto a sequential list of integers starting at 1 @@ -1051,22 +1063,32 @@ def __call__(self, img1, img2=None, normalization=None): self.tmpimgs[i].ravel()[self.subpxlsts[i]] = img1.ravel()[self.pxlsts[i]] if not self_correlation: self.tmpimgs2[i] *= 0 - self.tmpimgs2[i].ravel()[self.subpxlsts[i]] = img2.ravel()[self.pxlsts[i]] + self.tmpimgs2[i].ravel()[self.subpxlsts[i]] = img2.ravel()[ + self.pxlsts[i] + ] # multiply by maskcorrs > 0 to ignore invalid regions if self_correlation: ccorr = _cross_corr(self.tmpimgs[i]) * (self.maskcorrs[i] > 0) else: - ccorr = _cross_corr(self.tmpimgs[i], self.tmpimgs2[i]) * (self.maskcorrs[i] > 0) + ccorr = _cross_corr(self.tmpimgs[i], self.tmpimgs2[i]) * ( + self.maskcorrs[i] > 0 + ) # now handle the normalizations if "symavg" in normalization: # do symmetric averaging - Icorr = _cross_corr(self.tmpimgs[i] * self.submasks[i], self.submasks[i]) + Icorr = _cross_corr( + self.tmpimgs[i] * self.submasks[i], self.submasks[i] + ) if self_correlation: - Icorr2 = _cross_corr(self.submasks[i], self.tmpimgs[i] * self.submasks[i]) + Icorr2 = _cross_corr( + self.submasks[i], self.tmpimgs[i] * self.submasks[i] + ) else: - Icorr2 = _cross_corr(self.submasks[i], self.tmpimgs2[i] * self.submasks[i]) + Icorr2 = _cross_corr( + self.submasks[i], self.tmpimgs2[i] * self.submasks[i] + ) # there is an extra condition that Icorr*Icorr2 != 0 w = np.where(np.abs(Icorr * Icorr2) > 0) ccorr[w] *= self.maskcorrs[i][w] / Icorr[w] / Icorr2[w] @@ -1074,7 +1096,10 @@ def __call__(self, img1, img2=None, normalization=None): if "regular" in normalization: # only run on overlapping regions for correlation w = self.pxlst_maskcorrs[i] - ccorr[w] /= self.maskcorrs[i][w] * np.average(self.tmpimgs[i].ravel()[self.subpxlsts[i]]) ** 2 + ccorr[w] /= ( + self.maskcorrs[i][w] + * np.average(self.tmpimgs[i].ravel()[self.subpxlsts[i]]) ** 2 + ) ccorrs.append(ccorr) diff 
--git a/pyCHX/v2/_commonspeckle/chx_correlationc.py b/pyCHX/v2/_commonspeckle/chx_correlationc.py index fb31982..864099f 100644 --- a/pyCHX/v2/_commonspeckle/chx_correlationc.py +++ b/pyCHX/v2/_commonspeckle/chx_correlationc.py @@ -4,7 +4,6 @@ This module is for computation of time correlation by using compressing algorithm """ - from __future__ import absolute_import, division, print_function import logging @@ -310,7 +309,9 @@ def _validate_and_transform_inputs(num_bufs, num_levels, labels): length of each levels """ if num_bufs % 2 != 0: - raise ValueError("There must be an even number of `num_bufs`. You " "provided %s" % num_bufs) + raise ValueError( + "There must be an even number of `num_bufs`. You provided %s" % num_bufs + ) label_array, pixel_list = extract_label_indices(labels) # map the indices onto a sequential list of integers starting at 1 @@ -398,7 +399,9 @@ def _init_state_one_time(num_levels, num_bufs, labels, cal_error=False): # matrix for normalizing G into g2 future_intensity = np.zeros_like(G) if cal_error: - G_all = np.zeros((int((num_levels + 1) * num_bufs / 2), len(pixel_list)), dtype=np.float64) + G_all = np.zeros( + (int((num_levels + 1) * num_bufs / 2), len(pixel_list)), dtype=np.float64 + ) # matrix for normalizing G into g2 past_intensity_all = np.zeros_like(G_all) @@ -502,7 +505,7 @@ def lazy_one_time( ------- A `results` object is yielded after every image has been processed. - This `reults` object contains, in this order: + This `results` object contains, in this order: - `g2`: the normalized correlation shape is (len(lag_steps), num_rois) - `lag_steps`: the times at which the correlation was computed @@ -923,7 +926,7 @@ def multi_tau_auto_corr( author: Mark Sutton For parameter description, please reference the docstring for lazy_one_time. Note that there is an API difference between this function - and `lazy_one_time`. The `images` arugment is at the end of this function + and `lazy_one_time`. 
The `images` argument is at the end of this function signature here for backwards compatibility, but is the first argument in the `lazy_one_time()` function. The semantics of the variables remain unchanged. @@ -946,7 +949,9 @@ def multi_tau_auto_corr( return result.g2, result.lag_steps -def multi_tau_two_time_auto_corr(num_lev, num_buf, ring_mask, FD, bad_frame_list=None, imgsum=None, norm=None): +def multi_tau_two_time_auto_corr( + num_lev, num_buf, ring_mask, FD, bad_frame_list=None, imgsum=None, norm=None +): """Wraps generator implementation of multi-tau two time correlation This function computes two-time correlation Original code : author: Yugang Zhang @@ -1011,7 +1016,7 @@ def lazy_two_time( ------ namedtuple A ``results`` object is yielded after every image has been processed. - This `reults` object contains, in this order: + This `results` object contains, in this order: - ``g2``: the normalized correlation shape is (num_rois, len(lag_steps), len(lag_steps)) - ``lag_steps``: the times at which the correlation was computed @@ -1040,7 +1045,9 @@ def lazy_two_time( num_frames = FD.end - FD.beg if two_time_internal_state is None: - two_time_internal_state = _init_state_two_time(num_levels, num_bufs, labels, num_frames) + two_time_internal_state = _init_state_two_time( + num_levels, num_bufs, labels, num_frames + ) # create a shorthand reference to the results and state named tuple s = two_time_internal_state qind, pixelist = roi.extract_label_indices(labels) @@ -1111,7 +1118,10 @@ def lazy_two_time( t1_idx = (s.count_level[level] - 1) * 2 - current_img_time = ((s.time_ind[level - 1])[t1_idx] + (s.time_ind[level - 1])[t1_idx + 1]) / 2.0 + current_img_time = ( + (s.time_ind[level - 1])[t1_idx] + + (s.time_ind[level - 1])[t1_idx + 1] + ) / 2.0 # time frame for each level s.time_ind[level].append(current_img_time) # make the track_level zero once that level is processed @@ -1231,9 +1241,13 @@ def _two_time_process( if not isinstance(current_img_time, int): nshift 
= 2 ** (level - 1) for i in range(-nshift + 1, nshift + 1): - g2[:, int(tind1 + i), int(tind2 + i)] = (tmp_binned / (pi_binned * fi_binned)) * num_pixels + g2[:, int(tind1 + i), int(tind2 + i)] = ( + tmp_binned / (pi_binned * fi_binned) + ) * num_pixels else: - g2[:, int(tind1), int(tind2)] = tmp_binned / (pi_binned * fi_binned) * num_pixels + g2[:, int(tind1), int(tind2)] = ( + tmp_binned / (pi_binned * fi_binned) * num_pixels + ) # print( num_pixels ) @@ -1343,11 +1357,16 @@ def cal_c12c( if num_lev is None: num_lev = int(np.log(noframes / (num_buf - 1)) / np.log(2) + 1) + 1 - print("In this g2 calculation, the buf and lev number are: %s--%s--" % (num_buf, num_lev)) + print( + "In this g2 calculation, the buf and lev number are: %s--%s--" + % (num_buf, num_lev) + ) if bad_frame_list is not None: if len(bad_frame_list) != 0: - print("Bad frame involved and will be precessed!") - noframes -= len(np.where(np.in1d(bad_frame_list, range(good_start, FD.end)))[0]) + print("Bad frame involved and will be processed!") + noframes -= len( + np.where(np.in1d(bad_frame_list, range(good_start, FD.end)))[0] + ) print("%s frames will be processed..." % (noframes)) c12, lag_steps, state = multi_tau_two_time_auto_corr( @@ -1384,11 +1403,16 @@ def cal_g2c( if num_lev is None: num_lev = int(np.log(noframes / (num_buf - 1)) / np.log(2) + 1) + 1 - print("In this g2 calculation, the buf and lev number are: %s--%s--" % (num_buf, num_lev)) + print( + "In this g2 calculation, the buf and lev number are: %s--%s--" + % (num_buf, num_lev) + ) if bad_frame_list is not None: if len(bad_frame_list) != 0: - print("Bad frame involved and will be precessed!") - noframes -= len(np.where(np.in1d(bad_frame_list, range(good_start, FD.end)))[0]) + print("Bad frame involved and will be processed!") + noframes -= len( + np.where(np.in1d(bad_frame_list, range(good_start, FD.end)))[0] + ) print("%s frames will be processed..." 
% (noframes)) if cal_error: @@ -1437,8 +1461,10 @@ def cal_g2c( g2[:g_max, qi - 1] = avgGi[:g_max] / (avgPi[:g_max] * avgFi[:g_max]) g2_err[:g_max, qi - 1] = np.sqrt( (1 / (avgFi[:g_max] * avgPi[:g_max])) ** 2 * devGi[:g_max] ** 2 - + (avgGi[:g_max] / (avgFi[:g_max] ** 2 * avgPi[:g_max])) ** 2 * devFi[:g_max] ** 2 - + (avgGi[:g_max] / (avgFi[:g_max] * avgPi[:g_max] ** 2)) ** 2 * devPi[:g_max] ** 2 + + (avgGi[:g_max] / (avgFi[:g_max] ** 2 * avgPi[:g_max])) ** 2 + * devFi[:g_max] ** 2 + + (avgGi[:g_max] / (avgFi[:g_max] * avgPi[:g_max] ** 2)) ** 2 + * devPi[:g_max] ** 2 ) print("G2 with error bar calculation DONE!") @@ -1498,7 +1524,7 @@ def __init__( pixelist: 1-D array, interest pixel list norm: each q-ROI of each frame is normalized by the corresponding q-ROI of time averaged intensity imgsum: each q-ROI of each frame is normalized by the total intensity of the corresponding frame, should have the same time sequences as FD, e.g., imgsum[10] corresponding to FD[10] - norm_inten: if True, each q-ROI of each frame is normlized by total intensity of the correponding q-ROI of the corresponding frame + norm_inten: if True, each q-ROI of each frame is normalized by total intensity of the corresponding q-ROI of the corresponding frame qind: the index of each ROI in one frame, i.e., q if norm_inten is True: qind has to be given @@ -1552,14 +1578,20 @@ def get_data(self): pxlist = timg[p[w]] - 1 # np.bincount( qind[pxlist], weight= - if self.mean_int_sets is not None: # for each frame will normalize each ROI by it's averaged value + if ( + self.mean_int_sets is not None + ): # for each frame will normalize each ROI by it's averaged value for j in range(noqs): # if i ==100: # if j==0: # print( self.mean_int_sets[i][j] ) # print( qind_[ noprs[j]: noprs[j+1] ] ) - Mean_Int_Qind[qind_[noprs[j] : noprs[j + 1]]] = self.mean_int_sets[i][j] - norm_Mean_Int_Qind = Mean_Int_Qind[pxlist] # self.mean_int_set or Mean_Int_Qind[pxlist] + Mean_Int_Qind[qind_[noprs[j] : noprs[j + 1]]] = 
self.mean_int_sets[ + i + ][j] + norm_Mean_Int_Qind = Mean_Int_Qind[ + pxlist + ] # self.mean_int_set or Mean_Int_Qind[pxlist] # if i==100: # print( i, Mean_Int_Qind[ self.qind== 11 ]) @@ -1613,7 +1645,7 @@ def __init__( pixelist: 1-D array, interest pixel list norm: each q-ROI of each frame is normalized by the corresponding q-ROI of time averaged intensity imgsum: each q-ROI of each frame is normalized by the total intensity of the corresponding frame, should have the same time sequences as FD, e.g., imgsum[10] corresponding to FD[10] - mean_int_sets: each q-ROI of each frame is normlized by total intensity of the correponding q-ROI of the corresponding frame + mean_int_sets: each q-ROI of each frame is normalized by total intensity of the corresponding q-ROI of the corresponding frame qind: the index of each ROI in one frame, i.e., q if mean_int_sets is not None: qind has to be not None @@ -1666,14 +1698,20 @@ def get_data(self): w = np.where(timg[p])[0] pxlist = timg[p[w]] - 1 - if self.mean_int_sets is not None: # for normalization of each averaged ROI of each frame + if ( + self.mean_int_sets is not None + ): # for normalization of each averaged ROI of each frame for j in range(noqs): # if i ==100: # if j==0: # print( self.mean_int_sets[i][j] ) # print( qind_[ noprs[j]: noprs[j+1] ] ) - Mean_Int_Qind[qind_[noprs[j] : noprs[j + 1]]] = self.mean_int_sets[i][j] - norm_Mean_Int_Qind = Mean_Int_Qind[pxlist] # self.mean_int_set or Mean_Int_Qind[pxlist] + Mean_Int_Qind[qind_[noprs[j] : noprs[j + 1]]] = self.mean_int_sets[ + i + ][j] + norm_Mean_Int_Qind = Mean_Int_Qind[ + pxlist + ] # self.mean_int_set or Mean_Int_Qind[pxlist] # if i==100: # print( i, Mean_Int_Qind[ self.qind== 11 ]) @@ -1749,7 +1787,7 @@ def auto_two_Arrayc(data_pixel, rois, index=None): DO = True except: print( - "The array is too large. The Sever can't handle such big array. Will calulate different Q sequencely" + "The array is too large. The Sever can't handle such big array. 
Will calculate different Q sequencely" ) """TO be done here """ DO = False @@ -1765,7 +1803,9 @@ def auto_two_Arrayc(data_pixel, rois, index=None): sum2 = sum1.T # print( qi, qlist, ) # print( g12b[:,:,qi -1 ] ) - g12b[:, :, i] = np.dot(data_pixel_qi, data_pixel_qi.T) / sum1 / sum2 / nopr[qi - 1] + g12b[:, :, i] = ( + np.dot(data_pixel_qi, data_pixel_qi.T) / sum1 / sum2 / nopr[qi - 1] + ) i += 1 return g12b @@ -1773,12 +1813,12 @@ def auto_two_Arrayc(data_pixel, rois, index=None): def auto_two_Arrayc_ExplicitNorm(data_pixel, rois, norm=None, index=None): """ Dec 16, 2015, Y.G.@CHX - a numpy operation method to get two-time correlation function by giving explict normalization + a numpy operation method to get two-time correlation function by giving explicit normalization Parameters: data: images sequence, shape as [img[0], img[1], imgs_length] rois: 2-D array, the interested roi, has the same shape as image, can be rings for saxs, boxes for gisaxs - norm: if not None, shoud be the shape as data_pixel, will normalize two time by this norm + norm: if not None, should be the shape as data_pixel, will normalize two time by this norm if None, will return two time without normalization Options: @@ -1816,7 +1856,7 @@ def auto_two_Arrayc_ExplicitNorm(data_pixel, rois, norm=None, index=None): DO = True except: print( - "The array is too large. The Sever can't handle such big array. Will calulate different Q sequencely" + "The array is too large. The Sever can't handle such big array. 
Will calculate different Q sequencely" ) """TO be done here """ DO = False @@ -1832,7 +1872,9 @@ def auto_two_Arrayc_ExplicitNorm(data_pixel, rois, norm=None, index=None): else: sum1 = 1 sum2 = 1 - g12b[:, :, i] = np.dot(data_pixel_qi, data_pixel_qi.T) / sum1 / sum2 / nopr[qi - 1] + g12b[:, :, i] = ( + np.dot(data_pixel_qi, data_pixel_qi.T) / sum1 / sum2 / nopr[qi - 1] + ) i += 1 return g12b @@ -1881,7 +1923,7 @@ def two_time_norm(data_pixel, rois, index=None): DO = True except: print( - "The array is too large. The Sever can't handle such big array. Will calulate different Q sequencely" + "The array is too large. The Sever can't handle such big array. Will calculate different Q sequencely" ) """TO be done here """ DO = False @@ -1909,7 +1951,7 @@ def check_normalization(frame_num, q_list, imgsa, data_pixel): frame_num: integer, the number of frame to be checked q_list: list of integer, the list of q to be checked imgsa: the raw data - data_pixel: the normalized data, caculated by fucntion Get_Pixel_Arrayc + data_pixel: the normalized data, calculated by function Get_Pixel_Arrayc Plot the intensities """ fig, ax = plt.subplots(2) diff --git a/pyCHX/v2/_commonspeckle/chx_correlationp.py b/pyCHX/v2/_commonspeckle/chx_correlationp.py index 87843cd..868d6b6 100644 --- a/pyCHX/v2/_commonspeckle/chx_correlationp.py +++ b/pyCHX/v2/_commonspeckle/chx_correlationp.py @@ -3,31 +3,33 @@ yuzhang@bnl.gov This module is for parallel computation of time correlation """ + from __future__ import absolute_import, division, print_function import logging -import sys -from collections import namedtuple from multiprocessing import Pool -import dill import numpy as np import skbeam.core.roi as roi from skbeam.core.roi import extract_label_indices -from skbeam.core.utils import multi_tau_lags from pyCHX.v2._commonspeckle.chx_compress import ( # common #TODO understand what to keep apply_async, - go_through_FD, - map_async, pass_FD, - run_dill_encoded, ) -from 
pyCHX.v2._commonspeckle.chx_correlationc import _one_time_process as _one_time_processp # common -from pyCHX.v2._commonspeckle.chx_correlationc import _one_time_process_error as _one_time_process_errorp -from pyCHX.v2._commonspeckle.chx_correlationc import _two_time_process as _two_time_processp -from pyCHX.v2._commonspeckle.chx_correlationc import _validate_and_transform_inputs, get_pixelist_interp_iq -from pyCHX.v2._commonspeckle.chx_libs import tqdm # common #TODO why import from chx module? +from pyCHX.v2._commonspeckle.chx_correlationc import ( + _one_time_process as _one_time_processp, +) # common +from pyCHX.v2._commonspeckle.chx_correlationc import ( + _one_time_process_error as _one_time_process_errorp, +) +from pyCHX.v2._commonspeckle.chx_correlationc import ( + _two_time_process as _two_time_processp, +) +from pyCHX.v2._commonspeckle.chx_correlationc import _validate_and_transform_inputs +from pyCHX.v2._commonspeckle.chx_libs import ( + tqdm, +) # common #TODO why import from chx module? logger = logging.getLogger(__name__) @@ -135,7 +137,7 @@ def lazy_two_timep( ------ namedtuple A ``results`` object is yielded after every image has been processed. 
- This `reults` object contains, in this order: + This `results` object contains, in this order: - ``g2``: the normalized correlation shape is (num_rois, len(lag_steps), len(lag_steps)) - ``lag_steps``: the times at which the correlation was computed @@ -241,7 +243,10 @@ def lazy_two_timep( s.buf[level - 1, prev - 1] + s.buf[level - 1, s.cur[level - 1] - 1] ) / 2 t1_idx = (s.count_level[level] - 1) * 2 - current_img_time = ((s.time_ind[level - 1])[t1_idx] + (s.time_ind[level - 1])[t1_idx + 1]) / 2.0 + current_img_time = ( + (s.time_ind[level - 1])[t1_idx] + + (s.time_ind[level - 1])[t1_idx + 1] + ) / 2.0 # time frame for each level s.time_ind[level].append(current_img_time) # make the track_level zero once that level is processed @@ -293,13 +298,20 @@ def cal_c12p( pass_FD(FD, i) if num_lev is None: num_lev = int(np.log(noframes / (num_buf - 1)) / np.log(2) + 1) + 1 - print("In this g2 calculation, the buf and lev number are: %s--%s--" % (num_buf, num_lev)) + print( + "In this g2 calculation, the buf and lev number are: %s--%s--" + % (num_buf, num_lev) + ) if bad_frame_list is not None: if len(bad_frame_list) != 0: - print("Bad frame involved and will be precessed!") - noframes -= len(np.where(np.in1d(bad_frame_list, range(good_start, FD.end)))[0]) + print("Bad frame involved and will be processed!") + noframes -= len( + np.where(np.in1d(bad_frame_list, range(good_start, FD.end)))[0] + ) print("%s frames will be processed..." 
% (noframes)) - ring_masks = [np.array(ring_mask == i, dtype=np.int64) for i in np.unique(ring_mask)[1:]] + ring_masks = [ + np.array(ring_mask == i, dtype=np.int64) for i in np.unique(ring_mask)[1:] + ] qind, pixelist = roi.extract_label_indices(ring_mask) if norm is not None: S = norm.shape @@ -309,7 +321,9 @@ def cal_c12p( :, np.in1d( pixelist, - extract_label_indices(np.array(ring_mask == i, dtype=np.int64))[1], + extract_label_indices(np.array(ring_mask == i, dtype=np.int64))[ + 1 + ], ), ] for i in np.unique(ring_mask)[1:] @@ -319,7 +333,9 @@ def cal_c12p( norm[ np.in1d( pixelist, - extract_label_indices(np.array(ring_mask == i, dtype=np.int64))[1], + extract_label_indices(np.array(ring_mask == i, dtype=np.int64))[ + 1 + ], ) ] for i in np.unique(ring_mask)[1:] @@ -386,7 +402,7 @@ def __init__(self, num_levels, num_bufs, labels, cal_error=False): """YG. DEV Nov, 2016, Initialize class for the generator-based multi-tau for one time correlation - Jan 1, 2018, Add cal_error option to calculate signal to noise to one time correaltion + Jan 1, 2018, Add cal_error option to calculate signal to noise to one time correlation """ ( @@ -671,13 +687,20 @@ def cal_g2p( pass_FD(FD, i) if num_lev is None: num_lev = int(np.log(noframes / (num_buf - 1)) / np.log(2) + 1) + 1 - print("In this g2 calculation, the buf and lev number are: %s--%s--" % (num_buf, num_lev)) + print( + "In this g2 calculation, the buf and lev number are: %s--%s--" + % (num_buf, num_lev) + ) if bad_frame_list is not None: if len(bad_frame_list) != 0: print("%s Bad frames involved and will be discarded!" % len(bad_frame_list)) - noframes -= len(np.where(np.in1d(bad_frame_list, range(good_start, FD.end)))[0]) + noframes -= len( + np.where(np.in1d(bad_frame_list, range(good_start, FD.end)))[0] + ) print("%s frames will be processed..." 
% (noframes - 1)) - ring_masks = [np.array(ring_mask == i, dtype=np.int64) for i in np.unique(ring_mask)[1:]] + ring_masks = [ + np.array(ring_mask == i, dtype=np.int64) for i in np.unique(ring_mask)[1:] + ] qind, pixelist = roi.extract_label_indices(ring_mask) noqs = len(np.unique(qind)) nopr = np.bincount(qind, minlength=(noqs + 1))[1:] @@ -689,7 +712,9 @@ def cal_g2p( :, np.in1d( pixelist, - extract_label_indices(np.array(ring_mask == i, dtype=np.int64))[1], + extract_label_indices(np.array(ring_mask == i, dtype=np.int64))[ + 1 + ], ), ] for i in np.unique(ring_mask)[1:] @@ -699,7 +724,9 @@ def cal_g2p( norm[ np.in1d( pixelist, - extract_label_indices(np.array(ring_mask == i, dtype=np.int64))[1], + extract_label_indices(np.array(ring_mask == i, dtype=np.int64))[ + 1 + ], ) ] for i in np.unique(ring_mask)[1:] @@ -795,8 +822,10 @@ def cal_g2p( g2[:g_max, i] = avgGi[:g_max] / (avgPi[:g_max] * avgFi[:g_max]) g2_err[:g_max, i] = np.sqrt( (1 / (avgFi[:g_max] * avgPi[:g_max])) ** 2 * devGi[:g_max] ** 2 - + (avgGi[:g_max] / (avgFi[:g_max] ** 2 * avgPi[:g_max])) ** 2 * devFi[:g_max] ** 2 - + (avgGi[:g_max] / (avgFi[:g_max] * avgPi[:g_max] ** 2)) ** 2 * devPi[:g_max] ** 2 + + (avgGi[:g_max] / (avgFi[:g_max] ** 2 * avgPi[:g_max])) ** 2 + * devFi[:g_max] ** 2 + + (avgGi[:g_max] / (avgFi[:g_max] * avgPi[:g_max] ** 2)) ** 2 + * devPi[:g_max] ** 2 ) Gmax = max(g_max, Gmax) lag_stepsi = res[i][1] @@ -834,17 +863,24 @@ def cal_GPF( pass_FD(FD, i) if num_lev is None: num_lev = int(np.log(noframes / (num_buf - 1)) / np.log(2) + 1) + 1 - print("In this g2 calculation, the buf and lev number are: %s--%s--" % (num_buf, num_lev)) + print( + "In this g2 calculation, the buf and lev number are: %s--%s--" + % (num_buf, num_lev) + ) if bad_frame_list is not None: if len(bad_frame_list) != 0: print("%s Bad frames involved and will be discarded!" 
% len(bad_frame_list)) - noframes -= len(np.where(np.in1d(bad_frame_list, range(good_start, FD.end)))[0]) + noframes -= len( + np.where(np.in1d(bad_frame_list, range(good_start, FD.end)))[0] + ) print("%s frames will be processed..." % (noframes - 1)) if np.min(ring_mask) == 0: qstart = 1 else: qstart = 0 - ring_masks = [np.array(ring_mask == i, dtype=np.int64) for i in np.unique(ring_mask)[qstart:]] + ring_masks = [ + np.array(ring_mask == i, dtype=np.int64) for i in np.unique(ring_mask)[qstart:] + ] qind, pixelist = roi.extract_label_indices(ring_mask) noqs = len(np.unique(qind)) nopr = np.bincount(qind, minlength=(noqs + 1))[qstart:] @@ -968,8 +1004,10 @@ def get_g2_from_ROI_GPF(G, P, F, roi_mask): g2[:g_max, i - 1] = avgGi[:g_max] / (avgPi[:g_max] * avgFi[:g_max]) g2_err[:g_max, i - 1] = np.sqrt( (1 / (avgFi[:g_max] * avgPi[:g_max])) ** 2 * devGi[:g_max] ** 2 - + (avgGi[:g_max] / (avgFi[:g_max] ** 2 * avgPi[:g_max])) ** 2 * devFi[:g_max] ** 2 - + (avgGi[:g_max] / (avgFi[:g_max] * avgPi[:g_max] ** 2)) ** 2 * devPi[:g_max] ** 2 + + (avgGi[:g_max] / (avgFi[:g_max] ** 2 * avgPi[:g_max])) ** 2 + * devFi[:g_max] ** 2 + + (avgGi[:g_max] / (avgFi[:g_max] * avgPi[:g_max] ** 2)) ** 2 + * devPi[:g_max] ** 2 ) return g2, g2_err @@ -1031,7 +1069,9 @@ def auto_two_Arrayp(data_pixel, rois, index=None): pool = Pool(processes=len(inputs)) results = {} for i in inputs: - results[i] = pool.apply_async(_get_two_time_for_one_q, [qlist[i], data_pixel_qis[i], nopr, noframes]) + results[i] = pool.apply_async( + _get_two_time_for_one_q, [qlist[i], data_pixel_qis[i], nopr, noframes] + ) pool.close() pool.join() res = np.array([results[k].get() for k in list(sorted(results.keys()))]) diff --git a/pyCHX/v2/_commonspeckle/chx_correlationp2.py b/pyCHX/v2/_commonspeckle/chx_correlationp2.py index ca8c0f0..da54206 100644 --- a/pyCHX/v2/_commonspeckle/chx_correlationp2.py +++ b/pyCHX/v2/_commonspeckle/chx_correlationp2.py @@ -5,31 +5,33 @@ Feb 20, 2018 The chx_correlationp2 is for dedug g2 """ 
+ from __future__ import absolute_import, division, print_function import logging -import sys -from collections import namedtuple from multiprocessing import Pool -import dill import numpy as np import skbeam.core.roi as roi from skbeam.core.roi import extract_label_indices -from skbeam.core.utils import multi_tau_lags from pyCHX.v2._commonspeckle.chx_compress import ( # common #TODO understand what to keep apply_async, - go_through_FD, - map_async, pass_FD, - run_dill_encoded, ) -from pyCHX.v2._commonspeckle.chx_correlationc import _one_time_process as _one_time_processp # common -from pyCHX.v2._commonspeckle.chx_correlationc import _one_time_process_error as _one_time_process_errorp -from pyCHX.v2._commonspeckle.chx_correlationc import _two_time_process as _two_time_processp -from pyCHX.v2._commonspeckle.chx_correlationc import _validate_and_transform_inputs, get_pixelist_interp_iq -from pyCHX.v2._commonspeckle.chx_libs import tqdm # common #TODO why not from chx module?? +from pyCHX.v2._commonspeckle.chx_correlationc import ( + _one_time_process as _one_time_processp, +) # common +from pyCHX.v2._commonspeckle.chx_correlationc import ( + _one_time_process_error as _one_time_process_errorp, +) +from pyCHX.v2._commonspeckle.chx_correlationc import ( + _two_time_process as _two_time_processp, +) +from pyCHX.v2._commonspeckle.chx_correlationc import _validate_and_transform_inputs +from pyCHX.v2._commonspeckle.chx_libs import ( + tqdm, +) # common #TODO why not from chx module?? logger = logging.getLogger(__name__) @@ -137,7 +139,7 @@ def lazy_two_timep( ------ namedtuple A ``results`` object is yielded after every image has been processed. 
- This `reults` object contains, in this order: + This `results` object contains, in this order: - ``g2``: the normalized correlation shape is (num_rois, len(lag_steps), len(lag_steps)) - ``lag_steps``: the times at which the correlation was computed @@ -238,7 +240,10 @@ def lazy_two_timep( s.buf[level - 1, prev - 1] + s.buf[level - 1, s.cur[level - 1] - 1] ) / 2 t1_idx = (s.count_level[level] - 1) * 2 - current_img_time = ((s.time_ind[level - 1])[t1_idx] + (s.time_ind[level - 1])[t1_idx + 1]) / 2.0 + current_img_time = ( + (s.time_ind[level - 1])[t1_idx] + + (s.time_ind[level - 1])[t1_idx + 1] + ) / 2.0 # time frame for each level s.time_ind[level].append(current_img_time) # make the track_level zero once that level is processed @@ -290,13 +295,20 @@ def cal_c12p( pass_FD(FD, i) if num_lev is None: num_lev = int(np.log(noframes / (num_buf - 1)) / np.log(2) + 1) + 1 - print("In this g2 calculation, the buf and lev number are: %s--%s--" % (num_buf, num_lev)) + print( + "In this g2 calculation, the buf and lev number are: %s--%s--" + % (num_buf, num_lev) + ) if bad_frame_list is not None: if len(bad_frame_list) != 0: - print("Bad frame involved and will be precessed!") - noframes -= len(np.where(np.in1d(bad_frame_list, range(good_start, FD.end)))[0]) + print("Bad frame involved and will be processed!") + noframes -= len( + np.where(np.in1d(bad_frame_list, range(good_start, FD.end)))[0] + ) print("%s frames will be processed..." % (noframes)) - ring_masks = [np.array(ring_mask == i, dtype=np.int64) for i in np.unique(ring_mask)[1:]] + ring_masks = [ + np.array(ring_mask == i, dtype=np.int64) for i in np.unique(ring_mask)[1:] + ] qind, pixelist = roi.extract_label_indices(ring_mask) if norm is not None: norms = [ @@ -370,7 +382,7 @@ def __init__(self, num_levels, num_bufs, labels, cal_error=False): """YG. 
DEV Nov, 2016, Initialize class for the generator-based multi-tau for one time correlation - Jan 1, 2018, Add cal_error option to calculate signal to noise to one time correaltion + Jan 1, 2018, Add cal_error option to calculate signal to noise to one time correlation """ ( @@ -647,13 +659,20 @@ def cal_g2p( pass_FD(FD, i) if num_lev is None: num_lev = int(np.log(noframes / (num_buf - 1)) / np.log(2) + 1) + 1 - print("In this g2 calculation, the buf and lev number are: %s--%s--" % (num_buf, num_lev)) + print( + "In this g2 calculation, the buf and lev number are: %s--%s--" + % (num_buf, num_lev) + ) if bad_frame_list is not None: if len(bad_frame_list) != 0: print("%s Bad frames involved and will be discarded!" % len(bad_frame_list)) - noframes -= len(np.where(np.in1d(bad_frame_list, range(good_start, FD.end)))[0]) + noframes -= len( + np.where(np.in1d(bad_frame_list, range(good_start, FD.end)))[0] + ) print("%s frames will be processed..." % (noframes - 1)) - ring_masks = [np.array(ring_mask == i, dtype=np.int64) for i in np.unique(ring_mask)[1:]] + ring_masks = [ + np.array(ring_mask == i, dtype=np.int64) for i in np.unique(ring_mask)[1:] + ] qind, pixelist = roi.extract_label_indices(ring_mask) noqs = len(np.unique(qind)) nopr = np.bincount(qind, minlength=(noqs + 1))[1:] @@ -763,8 +782,10 @@ def cal_g2p( g2[:g_max, i] = avgGi[:g_max] / (avgPi[:g_max] * avgFi[:g_max]) g2_err[:g_max, i] = np.sqrt( (1 / (avgFi[:g_max] * avgPi[:g_max])) ** 2 * devGi[:g_max] ** 2 - + (avgGi[:g_max] / (avgFi[:g_max] ** 2 * avgPi[:g_max])) ** 2 * devFi[:g_max] ** 2 - + (avgGi[:g_max] / (avgFi[:g_max] * avgPi[:g_max] ** 2)) ** 2 * devPi[:g_max] ** 2 + + (avgGi[:g_max] / (avgFi[:g_max] ** 2 * avgPi[:g_max])) ** 2 + * devFi[:g_max] ** 2 + + (avgGi[:g_max] / (avgFi[:g_max] * avgPi[:g_max] ** 2)) ** 2 + * devPi[:g_max] ** 2 ) Gmax = max(g_max, Gmax) lag_stepsi = res[i][1] @@ -848,7 +869,9 @@ def auto_two_Arrayp(data_pixel, rois, index=None): pool = Pool(processes=len(inputs)) results = {} 
for i in inputs: - results[i] = pool.apply_async(_get_two_time_for_one_q, [qlist[i], data_pixel_qis[i], nopr, noframes]) + results[i] = pool.apply_async( + _get_two_time_for_one_q, [qlist[i], data_pixel_qis[i], nopr, noframes] + ) pool.close() pool.join() res = np.array([results[k].get() for k in list(sorted(results.keys()))]) diff --git a/pyCHX/v2/_commonspeckle/chx_generic_functions.py b/pyCHX/v2/_commonspeckle/chx_generic_functions.py index fb6db14..853d71c 100644 --- a/pyCHX/v2/_commonspeckle/chx_generic_functions.py +++ b/pyCHX/v2/_commonspeckle/chx_generic_functions.py @@ -1,7 +1,6 @@ import copy import datetime from os import listdir -from shutil import copyfile # from modest_image import imshow #common import matplotlib.cm as mcm @@ -9,10 +8,9 @@ import PIL import pytz import scipy -from matplotlib import cm from scipy.special import erf -from skbeam.core.utils import angle_grid, radial_grid, radius_to_twotheta, twotheta_to_q -from skimage.draw import disk, ellipse, line, line_aa, polygon +from skbeam.core.utils import angle_grid, radial_grid, radius_to_twotheta +from skimage.draw import disk, ellipse, polygon from skimage.filters import prewitt # from tqdm import * @@ -87,7 +85,9 @@ def generate_h5_list(inDir, filename): for fp_ in fp: if ".h5" in fp_: append_txtfile(filename=filename, data=np.array([FP_ + "/" + fp_])) - print("The full path of all the .h5 in %s has been saved in %s." % (inDir, filename)) + print( + "The full path of all the .h5 in %s has been saved in %s." 
% (inDir, filename) + ) print("You can use ./analysis/run_gui to visualize all the h5 file.") @@ -103,7 +103,7 @@ def fit_one_peak_curve(x, y, fit_range=None): fwhm: float, full width at half max intensity of the peak, 2*sigma fwhm_std:float, error bar of the full width at half max intensity of the peak xf: the x in the fit - out: the fitting class resutled from lmfit + out: the fitting class resulted from lmfit """ from lmfit.models import LinearModel, LorentzianModel @@ -248,7 +248,9 @@ def get_zero_nozero_qind_from_roi_mask(roi_mask, mask): return w, w1 -def get_masked_qval_qwid_dict_using_Rmax(new_mask, setup_pargs, old_roi_mask, old_cen, geometry): +def get_masked_qval_qwid_dict_using_Rmax( + new_mask, setup_pargs, old_roi_mask, old_cen, geometry +): """YG Dev April 22, 2019 Get qval_dict, qwid_dict by applying mask to roi_mask using a Rmax method""" cy, cx = setup_pargs["center"] my, mx = new_mask.shape @@ -277,7 +279,9 @@ def get_masked_qval_qwid_dict_using_Rmax(new_mask, setup_pargs, old_roi_mask, ol "Ldet": setup_pargs["Ldet"], "lambda_": setup_pargs["lambda_"], } - qval_dict1, qwid_dict1 = get_masked_qval_qwid_dict(roi_mask1, Fmask, setup_pargs_, geometry) + qval_dict1, qwid_dict1 = get_masked_qval_qwid_dict( + roi_mask1, Fmask, setup_pargs_, geometry + ) # w = get_zero_qind_from_roi_mask(roi_mask1,Fmask) return qval_dict1, qwid_dict1 # ,w @@ -285,7 +289,9 @@ def get_masked_qval_qwid_dict_using_Rmax(new_mask, setup_pargs, old_roi_mask, ol def get_masked_qval_qwid_dict(roi_mask, mask, setup_pargs, geometry): """YG Dev April 22, 2019 Get qval_dict, qwid_dict by applying mask to roi_mask""" - qval_dict_, qwid_dict_ = get_qval_qwid_dict(roi_mask, setup_pargs, geometry=geometry) + qval_dict_, qwid_dict_ = get_qval_qwid_dict( + roi_mask, setup_pargs, geometry=geometry + ) w, w1 = get_zero_nozero_qind_from_roi_mask(roi_mask, mask) qval_dictx = {k: v for (k, v) in list(qval_dict_.items()) if k not in w} qwid_dictx = {k: v for (k, v) in list(qwid_dict_.items()) if 
k not in w} @@ -303,7 +309,7 @@ def get_qval_qwid_dict(roi_mask, setup_pargs, geometry="saxs"): Input: roi_mask: integer type 2D array setup_pargs: dict, should at least contains, center (direct beam center), dpix (in mm), - lamda_: in A-1, Ldet: in mm + lambda_: in A-1, Ldet: in mm e.g., {'Ldet': 1495.0, abs #essential 'center': [-4469, 363], #essential @@ -315,7 +321,7 @@ def get_qval_qwid_dict(roi_mask, setup_pargs, geometry="saxs"): 'uid': 'uid=b85dad'} geometry: support saxs for isotropic transmission SAXS ang_saxs for anisotropic transmission SAXS - flow_saxs for anisotropic transmission SAXS under flow (center symetric) + flow_saxs for anisotropic transmission SAXS under flow (center symmetric) Return: qval_dict: dict, key as q-number, val: q val @@ -608,12 +614,16 @@ def plot_q_g2fitpara_general( if geometry == "ang_saxs": title_short = "Angle= %.2f" % (short_ulabel[s_ind]) + r"$^\circ$" elif geometry == "gi_saxs": - title_short = r"$Q_z= $" + "%.4f" % (short_ulabel[s_ind]) + r"$\AA^{-1}$" + title_short = ( + r"$Q_z= $" + "%.4f" % (short_ulabel[s_ind]) + r"$\AA^{-1}$" + ) else: title_short = "" else: # qr if geometry == "ang_saxs" or geometry == "gi_saxs": - title_short = r"$Q_r= $" + "%.5f " % (short_ulabel[s_ind]) + r"$\AA^{-1}$" + title_short = ( + r"$Q_r= $" + "%.5f " % (short_ulabel[s_ind]) + r"$\AA^{-1}$" + ) else: title_short = "" # print(geometry) @@ -643,8 +653,12 @@ def plot_q_g2fitpara_general( ax2 = fig.add_subplot(4, 1, 2) ax3 = fig.add_subplot(4, 1, 3) ax4 = fig.add_subplot(4, 1, 4) - plot1D(x=qi, y=betai, m="o", ls="--", c="k", ax=ax1, legend=r"$\beta$", title="") - plot1D(x=qi, y=alphai, m="o", ls="--", c="r", ax=ax2, legend=r"$\alpha$", title="") + plot1D( + x=qi, y=betai, m="o", ls="--", c="k", ax=ax1, legend=r"$\beta$", title="" + ) + plot1D( + x=qi, y=alphai, m="o", ls="--", c="r", ax=ax2, legend=r"$\alpha$", title="" + ) plot1D( x=qi, y=baselinei, @@ -795,12 +809,12 @@ def plot_xy_x2( **kwargs, ): """YG.@CHX 2019/10/ Plot x, y, x2, if 
have, will plot as twiny( same y, different x) - This funciton is primary for plot q-Iq + This function is primary for plot q-Iq Input: x: one-d array, x in one unit y: one-d array, - x2:one-d array, x in anoter unit + x2:one-d array, x in another unit pargs: dict, could include 'uid', 'path' loglog: if True, if plot x and y in log, by default plot in y-log save: if True, save the plot in the path defined in pargs @@ -853,7 +867,9 @@ def plot_xy_x2( fig.savefig(fp, dpi=fig.dpi) -def save_oavs_tifs(uid, data_dir, brightness_scale=1, scalebar_size=100, scale=1, threshold=0): +def save_oavs_tifs( + uid, data_dir, brightness_scale=1, scalebar_size=100, scale=1, threshold=0 +): """save oavs as png""" tifs = list(db[uid].data("OAV_image"))[0] try: @@ -866,8 +882,12 @@ def save_oavs_tifs(uid, data_dir, brightness_scale=1, scalebar_size=100, scale=1 h = db[uid] oavs = tifs - oav_period = h["descriptors"][0]["configuration"]["OAV"]["data"]["OAV_cam_acquire_period"] - oav_expt = h["descriptors"][0]["configuration"]["OAV"]["data"]["OAV_cam_acquire_time"] + oav_period = h["descriptors"][0]["configuration"]["OAV"]["data"][ + "OAV_cam_acquire_period" + ] + oav_expt = h["descriptors"][0]["configuration"]["OAV"]["data"][ + "OAV_cam_acquire_time" + ] oav_times = [] for i in range(len(oavs)): oav_times.append(oav_expt + i * oav_period) @@ -891,7 +911,7 @@ def save_oavs_tifs(uid, data_dir, brightness_scale=1, scalebar_size=100, scale=1 plt.imshow(rgb_cont_img, interpolation="none", resample=True, cmap="gray") plt.axis("equal") - cross = [685, 440, 50] # definintion of direct beam: x, y, size + cross = [685, 440, 50] # definition of direct beam: x, y, size plt.plot( [cross[0] - cross[2] / 2, cross[0] + cross[2] / 2], [cross[1], cross[1]], @@ -903,7 +923,9 @@ def save_oavs_tifs(uid, data_dir, brightness_scale=1, scalebar_size=100, scale=1 "r-", ) if pixel_scalebar is not None: - plt.plot([1100, 1100 + pixel_scalebar], [150, 150], "r-", Linewidth=5) # scale bar. 
+ plt.plot( + [1100, 1100 + pixel_scalebar], [150, 150], "r-", Linewidth=5 + ) # scale bar. plt.text(1000, 50, text_string, fontsize=14, color="r") plt.text(600, 50, str(oav_times[m])[:5] + " [s]", fontsize=14, color="r") plt.axis("off") @@ -954,7 +976,8 @@ def evalue_array(array, verbose=True): ) if verbose: print( - "The min, max, avg, std of this array are: %s %s %s %s, respectively." % (_min, _max, avg, std) + "The min, max, avg, std of this array are: %s %s %s %s, respectively." + % (_min, _max, avg, std) ) return _min, _max, avg, std @@ -971,7 +994,10 @@ def find_good_xpcs_uids(fuids, Nlim=100, det=["4m", "1m", "500"]): """ guids = [] for i, uid in enumerate(fuids): - if db[uid]["start"]["plan_name"] == "count" or db[uid]["start"]["plan_name"] == "manual_count": + if ( + db[uid]["start"]["plan_name"] == "count" + or db[uid]["start"]["plan_name"] == "manual_count" + ): head = db[uid]["start"] for dec in head["detectors"]: for dt in det: @@ -1005,7 +1031,9 @@ def create_fullImg_with_box( roi_mask = np.zeros(shape, dtype=np.int32) for i in range(box_nx): for j in range(box_ny): - roi_mask[i * Wrow : (i + 1) * Wrow, j * Wcol : (j + 1) * Wcol] = i * box_ny + j + 1 + roi_mask[i * Wrow : (i + 1) * Wrow, j * Wcol : (j + 1) * Wcol] = ( + i * box_ny + j + 1 + ) # roi_mask *= mask return roi_mask @@ -1108,7 +1136,10 @@ def copy_data(old_path, new_path="/tmp_data/data/"): for fp in tqdm(fps): if not os.path.exists(new_path + os.path.basename(fp)): shutil.copy(fp, new_path) - print("The files %s are copied: %s." % (old_path[:-10] + "*", new_path + os.path.basename(fp))) + print( + "The files %s are copied: %s." 
+ % (old_path[:-10] + "*", new_path + os.path.basename(fp)) + ) def delete_data(old_path, new_path="/tmp_data/data/"): @@ -1118,7 +1149,6 @@ def delete_data(old_path, new_path="/tmp_data/data/"): new_path: the new path """ import glob - import shutil # old_path = sud[2][0] # new_path = '/tmp_data/data/' @@ -1142,8 +1172,8 @@ def show_tif_series( ): """ tif_series: list of 2D tiff images - Nx: the number in the row for dispalying - center: the center of iamge (or direct beam pixel) + Nx: the number in the row for displaying + center: the center of image (or direct beam pixel) w: the ROI half size in pixel vmin: the min intensity value for plot vmax: if None, will be max intensity value of the ROI @@ -1186,9 +1216,6 @@ def show_tif_series( return fig, ax -from scipy.special import erf - - def ps(y, shift=0.5, replot=True, logplot="off", x=None): """ Dev 16, 2018 @@ -1223,7 +1250,10 @@ def is_positive(num): for i in range(len(y)): current_positive = is_positive(ym[i]) if current_positive != positive: - list_of_roots.append(x[i - 1] + (x[i] - x[i - 1]) / (abs(ym[i]) + abs(ym[i - 1])) * abs(ym[i - 1])) + list_of_roots.append( + x[i - 1] + + (x[i] - x[i - 1]) / (abs(ym[i]) + abs(ym[i - 1])) * abs(ym[i - 1]) + ) positive = not positive if len(list_of_roots) >= 2: FWHM = abs(list_of_roots[-1] - list_of_roots[0]) @@ -1340,13 +1370,19 @@ def create_seg_ring(ring_edges, ang_edges, mask, setup_pargs): flow_geometry=False, ) - roi_mask, good_ind = combine_two_roi_mask(roi_mask_qr, roi_mask_ang, pixel_num_thres=100) - qval_dict_ = get_qval_dict(qr_center=qr, qz_center=ang_center, one_qz_multi_qr=False) + roi_mask, good_ind = combine_two_roi_mask( + roi_mask_qr, roi_mask_ang, pixel_num_thres=100 + ) + qval_dict_ = get_qval_dict( + qr_center=qr, qz_center=ang_center, one_qz_multi_qr=False + ) qval_dict = {i: qval_dict_[k] for (i, k) in enumerate(good_ind)} return roi_mask, qval_dict -def find_bad_pixels_FD(bad_frame_list, FD, img_shape=[514, 1030], threshold=15, 
show_progress=True): +def find_bad_pixels_FD( + bad_frame_list, FD, img_shape=[514, 1030], threshold=15, show_progress=True +): """Designed to find bad pixel list in 500K threshold: the max intensity in 5K """ @@ -1370,7 +1406,7 @@ def find_bad_pixels_FD(bad_frame_list, FD, img_shape=[514, 1030], threshold=15, def get_q_iq_using_dynamic_mask(FD, mask, setup_pargs, bin_number=1, threshold=15): """DEV by Yugang@CHX, June 6, 2019 Get circular average of a time series using a dynamics mask, which pixel values are defined as - zeors if above a threshold. + zeros if above a threshold. Return an averaged q(pix)-Iq-q(A-1) of the whole time series using bin frames with bin_number Input: FD: the multifile handler for the time series @@ -1380,7 +1416,7 @@ def get_q_iq_using_dynamic_mask(FD, mask, setup_pargs, bin_number=1, threshold=1 'dpix', 'Ldet','lambda_', 'center' bin_number: bin number of the frame threshold: define the dynamics mask, which pixel values are defined as - zeors if above this threshold + zeros if above this threshold Output: qp_saxs: q in pixel iq_saxs: intenstity @@ -1392,14 +1428,19 @@ def get_q_iq_using_dynamic_mask(FD, mask, setup_pargs, bin_number=1, threshold=1 Nimg_ = FD.end - FD.beg # Nimg_ = 100 Nimg = Nimg_ // bin_number - time_edge = np.array(create_time_slice(N=Nimg_, slice_num=Nimg, slice_width=bin_number)) + beg + time_edge = ( + np.array(create_time_slice(N=Nimg_, slice_num=Nimg, slice_width=bin_number)) + + beg + ) for n in tqdm(range(Nimg)): t1, t2 = time_edge[n] # print(t1,t2) if bin_number == 1: avg_imgi = FD.rdframe(t1) else: - avg_imgi = get_avg_imgc(FD, beg=t1, end=t2, sampling=1, plot_=False, show_progress=False) + avg_imgi = get_avg_imgc( + FD, beg=t1, end=t2, sampling=1, plot_=False, show_progress=False + ) badpi = find_bad_pixels_FD( np.arange(t1, t2), FD, @@ -1408,7 +1449,9 @@ def get_q_iq_using_dynamic_mask(FD, mask, setup_pargs, bin_number=1, threshold=1 show_progress=False, ) img = avg_imgi * mask * badpi - qp_saxsi, iq_saxsi, 
q_saxsi = get_circular_average(img, mask * badpi, save=False, pargs=setup_pargs) + qp_saxsi, iq_saxsi, q_saxsi = get_circular_average( + img, mask * badpi, save=False, pargs=setup_pargs + ) # print( img.max()) if t1 == FD.beg: qp_saxs, iq_saxs, q_saxs = ( @@ -1464,10 +1507,10 @@ def get_img_from_iq(qp, iq, img_shape, center): def average_array_withNan(array, axis=0, mask=None): """YG. Jan 23, 2018 - Average array invovling np.nan along axis + Average array involving np.nan along axis Input: - array: ND array, actually should be oneD or twoD at this stage..TODOLIST for ND + array: AND array, actually should be oneD or twoD at this stage..TODOLIST for AND axis: the average axis mask: bool, same shape as array, if None, will mask all the nan values Output: @@ -1490,10 +1533,10 @@ def average_array_withNan(array, axis=0, mask=None): def deviation_array_withNan(array, axis=0, mask=None): """YG. Jan 23, 2018 - Get the deviation of array invovling np.nan along axis + Get the deviation of array involving np.nan along axis Input: - array: ND array + array: AND array axis: the average axis mask: bool, same shape as array, if None, will mask all the nan values Output: @@ -1576,10 +1619,14 @@ def get_echos(dat_arr, min_distance=10): """ from skimage.feature import peak_local_max - max_ind = peak_local_max(dat_arr, min_distance) # !!! careful, skimage function reverses the order (wtf?) + max_ind = peak_local_max( + dat_arr, min_distance + ) # !!! careful, skimage function reverses the order (wtf?) 
min_ind = [] for i in range(len(max_ind[:-1])): - min_ind.append(max_ind[i + 1][0] + np.argmin(dat_arr[max_ind[i + 1][0] : max_ind[i][0]])) + min_ind.append( + max_ind[i + 1][0] + np.argmin(dat_arr[max_ind[i + 1][0] : max_ind[i][0]]) + ) # unfortunately, skimage function fu$$s up the format: max_ind is an array of a list of lists...fix this: mmax_ind = [] for l in max_ind: @@ -1592,7 +1639,7 @@ def pad_length(arr, pad_val=np.nan): """ arr: 2D matrix pad_val: values being padded - adds pad_val to each row, to make the length of each row equal to the lenght of the longest row of the original matrix + adds pad_val to each row, to make the length of each row equal to the length of the longest row of the original matrix -> used to convert python generic data object to HDF5 native format function fixes python bug in padding (np.pad) integer array with np.nan by LW 12/30/2017 @@ -1634,8 +1681,8 @@ def ls_dir(inDir, have_list=[], exclude_list=[]): """Y.G. Aug 1, 2019 List all filenames in a filefolder inDir: fullpath of the inDir - have_string: only retrun filename containing the string - exclude_string: only retrun filename not containing the string + have_string: only return filename containing the string + exclude_string: only return filename not containing the string """ from os import listdir @@ -1661,7 +1708,7 @@ def ls_dir2(inDir, string=None): """Y.G. 
Nov 1, 2017 List all filenames in a filefolder (not include hidden files and subfolders) inDir: fullpath of the inDir - string: if not None, only retrun filename containing the string + string: if not None, only return filename containing the string """ from os import listdir from os.path import isfile, join @@ -1669,7 +1716,9 @@ def ls_dir2(inDir, string=None): if string is None: tifs = np.array([f for f in listdir(inDir) if isfile(join(inDir, f))]) else: - tifs = np.array([f for f in listdir(inDir) if (isfile(join(inDir, f))) & (string in f)]) + tifs = np.array( + [f for f in listdir(inDir) if (isfile(join(inDir, f))) & (string in f)] + ) return tifs @@ -1749,16 +1798,20 @@ def get_roi_nr( qinterest = qslist[q] # qindices = [i for i,x in enumerate(qs) if x == qinterest] qindices = [i for i, x in enumerate(qs) if np.abs(x - qinterest) < q_thresh] - # print('q_indicies: ',qindices) + # print('q_indices: ',qindices) else: qinterest = q - qindices = [i for i, x in enumerate(qs) if np.abs(x - qinterest) < q_thresh] # new + qindices = [ + i for i, x in enumerate(qs) if np.abs(x - qinterest) < q_thresh + ] # new if phi_nr: phiinterest = phislist[phi] phiindices = [i for i, x in enumerate(phis) if x == phiinterest] else: phiinterest = phi - phiindices = [i for i, x in enumerate(phis) if np.abs(x - phiinterest) < p_thresh] # new + phiindices = [ + i for i, x in enumerate(phis) if np.abs(x - phiinterest) < p_thresh + ] # new # print('phi: %s phi_index: %s'%(phiinterest,phiindices)) # qindices = [i for i,x in enumerate(qs) if x == qinterest] # phiindices = [i for i,x in enumerate(phis) if x == phiinterest] @@ -1774,7 +1827,14 @@ def get_roi_nr( print(qslist) print("list of available phis:") print(phislist) - print("Roi number for Q= " + str(ret_list[1]) + " and phi= " + str(ret_list[2]) + ": " + str(ret_list[0])) + print( + "Roi number for Q= " + + str(ret_list[1]) + + " and phi= " + + str(ret_list[2]) + + ": " + + str(ret_list[0]) + ) return ret_list @@ -1785,7 +1845,7 @@ 
def get_fit_by_two_linear( mid_xpoint2=None, xrange=None, ): - """YG Octo 16,2017 Fit a curve with two linear func, the curve is splitted by mid_xpoint, + """YG Octo 16,2017 Fit a curve with two linear func, the curve is split by mid_xpoint, namely, fit the curve in two regions defined by (xmin,mid_xpoint ) and (mid_xpoint2, xmax) Input: x: 1D np.array @@ -1795,9 +1855,9 @@ def get_fit_by_two_linear( Return: D1, gmfit1, D2, gmfit2 : fit parameter (slope, background) of linear fit1 - convinent fit class, gmfit1(x) gives yvale + convenient fit class, gmfit1(x) gives yvale fit parameter (slope, background) of linear fit2 - convinent fit class, gmfit2(x) gives yvale + convenient fit class, gmfit2(x) gives yvale """ if xrange is None: @@ -1829,7 +1889,9 @@ def get_curve_turning_points( """YG Octo 16,2017 Get a turning point of a curve by doing a two-linear fit """ - D1, gmfit1, D2, gmfit2 = get_fit_by_two_linear(x, y, mid_xpoint1, mid_xpoint2, xrange) + D1, gmfit1, D2, gmfit2 = get_fit_by_two_linear( + x, y, mid_xpoint1, mid_xpoint2, xrange + ) return get_cross_point(x, gmfit1, gmfit2) @@ -1837,7 +1899,9 @@ def plot_fit_two_linear_fit(x, y, gmfit1, gmfit2, ax=None): """YG Octo 16,2017 Plot data with two fitted linear func""" if ax is None: fig, ax = plt.subplots() - plot1D(x=x, y=y, ax=ax, c="k", legend="data", m="o", ls="") # logx=True, logy=True ) + plot1D( + x=x, y=y, ax=ax, c="k", legend="data", m="o", ls="" + ) # logx=True, logy=True ) plot1D(x=x, y=gmfit1(x), ax=ax, c="r", m="", ls="-", legend="fit1") plot1D(x=x, y=gmfit2(x), ax=ax, c="b", m="", ls="-", legend="fit2") return ax @@ -1849,7 +1913,10 @@ def linear_fit(x, y, xrange=None): """ if xrange is not None: xmin, xmax = xrange - x1, x2 = find_index(x, xmin, tolerance=None), find_index(x, xmax, tolerance=None) + x1, x2 = ( + find_index(x, xmin, tolerance=None), + find_index(x, xmax, tolerance=None), + ) x_ = x[x1:x2] y_ = y[x1:x2] else: @@ -1961,22 +2028,32 @@ def sgolay2d(z, window_size, order, 
derivative=None): Z = np.zeros((new_shape)) # top band band = z[0, :] - Z[:half_size, half_size:-half_size] = band - np.abs(np.flipud(z[1 : half_size + 1, :]) - band) + Z[:half_size, half_size:-half_size] = band - np.abs( + np.flipud(z[1 : half_size + 1, :]) - band + ) # bottom band band = z[-1, :] - Z[-half_size:, half_size:-half_size] = band + np.abs(np.flipud(z[-half_size - 1 : -1, :]) - band) + Z[-half_size:, half_size:-half_size] = band + np.abs( + np.flipud(z[-half_size - 1 : -1, :]) - band + ) # left band band = np.tile(z[:, 0].reshape(-1, 1), [1, half_size]) - Z[half_size:-half_size, :half_size] = band - np.abs(np.fliplr(z[:, 1 : half_size + 1]) - band) + Z[half_size:-half_size, :half_size] = band - np.abs( + np.fliplr(z[:, 1 : half_size + 1]) - band + ) # right band band = np.tile(z[:, -1].reshape(-1, 1), [1, half_size]) - Z[half_size:-half_size, -half_size:] = band + np.abs(np.fliplr(z[:, -half_size - 1 : -1]) - band) + Z[half_size:-half_size, -half_size:] = band + np.abs( + np.fliplr(z[:, -half_size - 1 : -1]) - band + ) # central band Z[half_size:-half_size, half_size:-half_size] = z # top left corner band = z[0, 0] - Z[:half_size, :half_size] = band - np.abs(np.flipud(np.fliplr(z[1 : half_size + 1, 1 : half_size + 1])) - band) + Z[:half_size, :half_size] = band - np.abs( + np.flipud(np.fliplr(z[1 : half_size + 1, 1 : half_size + 1])) - band + ) # bottom right corner band = z[-1, -1] Z[-half_size:, -half_size:] = band + np.abs( @@ -1985,10 +2062,14 @@ def sgolay2d(z, window_size, order, derivative=None): # top right corner band = Z[half_size, -half_size:] - Z[:half_size, -half_size:] = band - np.abs(np.flipud(Z[half_size + 1 : 2 * half_size + 1, -half_size:]) - band) + Z[:half_size, -half_size:] = band - np.abs( + np.flipud(Z[half_size + 1 : 2 * half_size + 1, -half_size:]) - band + ) # bottom left corner band = Z[-half_size:, half_size].reshape(-1, 1) - Z[-half_size:, :half_size] = band - np.abs(np.fliplr(Z[-half_size:, half_size + 1 : 2 * half_size + 
1]) - band) + Z[-half_size:, :half_size] = band - np.abs( + np.fliplr(Z[-half_size:, half_size + 1 : 2 * half_size + 1]) - band + ) # solve system and convolve if derivative == None: @@ -2003,7 +2084,9 @@ def sgolay2d(z, window_size, order, derivative=None): elif derivative == "both": c = np.linalg.pinv(A)[1].reshape((window_size, -1)) r = np.linalg.pinv(A)[2].reshape((window_size, -1)) - return scipy.signal.fftconvolve(Z, -r, mode="valid"), scipy.signal.fftconvolve(Z, -c, mode="valid") + return scipy.signal.fftconvolve(Z, -r, mode="valid"), scipy.signal.fftconvolve( + Z, -c, mode="valid" + ) def load_filelines(fullpath): @@ -2038,7 +2121,7 @@ def extract_data_from_file( good_line_pattern: str, data will be extract below this good_line_pattern Or giving start_row: int good_cols: list of integer, good index of cols - lables: the label of the good_cols + labels: the label of the good_cols #save: False, if True will save the data into a csv file with filename appending csv ?? Return: a pds.dataframe @@ -2122,7 +2205,7 @@ def get_print_uids(start_time, stop_time, return_all_info=False): def get_last_uids(n=-1): """YG Sep 26, 2017 - A Convinient function to copy uid to jupyter for analysis""" + A Convenient function to copy uid to jupyter for analysis""" uid = db[n]["start"]["uid"][:8] sid = db[n]["start"]["scan_id"] m = db[n]["start"]["Measurement"] @@ -2137,14 +2220,16 @@ def get_base_all_filenames(inDir, base_filename_cut_length=-7): base_filename_cut_length: to which length the base name is unique Output: dict: keys, base filename - vales, all realted filename + vales, all related filename """ from os import listdir from os.path import isfile, join tifs = np.array([f for f in listdir(inDir) if isfile(join(inDir, f))]) tifsc = list(tifs.copy()) - utifs = np.sort(np.unique(np.array([f[:base_filename_cut_length] for f in tifs])))[::-1] + utifs = np.sort(np.unique(np.array([f[:base_filename_cut_length] for f in tifs])))[ + ::-1 + ] files = {} for uf in utifs: files[uf] 
= [] @@ -2285,7 +2370,9 @@ def get_mass_center_one_roi(FD, roi_mask, roi_ind): m = roi_mask == roi_ind cx, cy = np.zeros(int((FD.end - FD.beg) / 1)), np.zeros(int((FD.end - FD.beg) / 1)) n = 0 - for i in tqdm(range(FD.beg, FD.end, 1), desc="Get mass center of one ROI of each frame"): + for i in tqdm( + range(FD.beg, FD.end, 1), desc="Get mass center of one ROI of each frame" + ): img = FD.rdframe(i) * m c = scipy.ndimage.measurements.center_of_mass(img) cx[n], cy[n] = int(c[0]), int(c[1]) @@ -2419,7 +2506,9 @@ def create_chip_edges_mask(det="1M"): return mask -def create_ellipse_donut(cx, cy, wx_inner, wy_inner, wx_outer, wy_outer, roi_mask, gap=0): +def create_ellipse_donut( + cx, cy, wx_inner, wy_inner, wx_outer, wy_outer, roi_mask, gap=0 +): Nmax = np.max(np.unique(roi_mask)) rr1, cc1 = ellipse(cy, cx, wy_inner, wx_inner) rr2, cc2 = ellipse(cy, cx, wy_inner + gap, wx_inner + gap) @@ -2485,10 +2574,10 @@ def get_fra_num_by_dose(exp_dose, exp_time, att=1, dead_time=2): """ Calculate the frame number to be correlated by giving a X-ray exposure dose - Paramters: + Parameters: exp_dose: a list, the exposed dose, e.g., in unit of exp_time(ms)*N(fram num)*att( attenuation) exp_time: float, the exposure time for a xpcs time sereies - dead_time: dead time for the fast shutter reponse time, CHX = 2ms + dead_time: dead time for the fast shutter response time, CHX = 2ms Return: noframes: the frame number to be correlated, exp_dose/( exp_time + dead_time ) e.g., @@ -2505,7 +2594,7 @@ def get_multi_tau_lag_steps(fra_max, num_bufs=8): """ Get taus in log steps ( a multi-taus defined taus ) for a time series with max frame number as fra_max Parameters: - fra_max: integer, the maximun frame number + fra_max: integer, the maximum frame number buf_num (default=8), Return: taus_in_log, a list @@ -2519,12 +2608,14 @@ def get_multi_tau_lag_steps(fra_max, num_bufs=8): return lag_steps[lag_steps < fra_max] -def get_series_g2_taus(fra_max_list, acq_time=1, max_fra_num=None, 
log_taus=True, num_bufs=8): +def get_series_g2_taus( + fra_max_list, acq_time=1, max_fra_num=None, log_taus=True, num_bufs=8 +): """ Get taus for dose dependent analysis Parameters: fra_max_list: a list, a lsit of largest available frame number - acq_time: acquistion time for each frame + acq_time: acquisition time for each frame log_taus: if true, will use the multi-tau defined taus bu using buf_num (default=8), otherwise, use deltau =1 Return: @@ -2547,8 +2638,8 @@ def get_series_g2_taus(fra_max_list, acq_time=1, max_fra_num=None, log_taus=True if n > L: warnings.warn( "Warning: the dose value is too large, and please" - "check the maxium dose in this data set and give a smaller dose value." - "We will use the maxium dose of the data." + "check the maximum dose in this data set and give a smaller dose value." + "We will use the maximum dose of the data." ) n = L if log_taus: @@ -2559,11 +2650,13 @@ def get_series_g2_taus(fra_max_list, acq_time=1, max_fra_num=None, log_taus=True return tausd -def check_lost_metadata(md, Nimg=None, inc_x0=None, inc_y0=None, pixelsize=7.5 * 10 * (-5)): +def check_lost_metadata( + md, Nimg=None, inc_x0=None, inc_y0=None, pixelsize=7.5 * 10 * (-5) +): """Y.G. Dec 31, 2016, check lost metadata Parameter: - md: dict, meta data dictionay + md: dict, meta data dictionary Nimg: number of frames for this uid metadata inc_x0/y0: incident beam center x0/y0, if None, will over-write the md['beam_center_x/y'] pixelsize: if md don't have ['x_pixel_size'], the pixelsize will add it @@ -2611,10 +2704,16 @@ def check_lost_metadata(md, Nimg=None, inc_x0=None, inc_y0=None, pixelsize=7.5 * timeperframe = acquisition_period if inc_x0 is not None: mdn["beam_center_x"] = inc_y0 - print("Beam_center_x has been changed to %s. (no change in raw metadata): " % inc_y0) + print( + "Beam_center_x has been changed to %s. 
(no change in raw metadata): " + % inc_y0 + ) if inc_y0 is not None: mdn["beam_center_y"] = inc_x0 - print("Beam_center_y has been changed to %s. (no change in raw metadata): " % inc_x0) + print( + "Beam_center_y has been changed to %s. (no change in raw metadata): " + % inc_x0 + ) center = [ int(mdn["beam_center_x"]), int(mdn["beam_center_y"]), @@ -2765,11 +2864,11 @@ def check_bad_uids(uids, mask, img_choice_N=10, bad_uids_index=None): bad_uids_index: a list of known bad uid list, default is None Return: guids: list, good uids - buids, list, bad uids + builds, list, bad uids """ import random - buids = [] + builds = [] guids = list(uids) # print( guids ) if bad_uids_index is None: @@ -2783,20 +2882,23 @@ def check_bad_uids(uids, mask, img_choice_N=10, bad_uids_index=None): imgsa = apply_mask(imgs, mask) avg_img = get_avg_img(imgsa, img_samp_index, plot_=False, uid=uid) if avg_img.max() == 0: - buids.append(uid) + builds.append(uid) guids.pop(list(np.where(np.array(guids) == uid)[0])[0]) print("The bad uid is: %s" % uid) else: guids.pop(list(np.where(np.array(guids) == uid)[0])[0]) - buids.append(uid) + builds.append(uid) print("The bad uid is: %s" % uid) - print("The total and bad uids number are %s and %s, repsectively." % (len(uids), len(buids))) - return guids, buids + print( + "The total and bad uids number are %s and %s, respectively." + % (len(uids), len(builds)) + ) + return guids, builds def find_uids(start_time, stop_time): """Y.G. 
Dec 22, 2016 - A wrap funciton to find uids by giving start and end time + A wrap function to find uids by giving start and end time Return: sids: list, scan id uids: list, uid with 8 character length @@ -3022,7 +3124,9 @@ def get_bad_frame_list( fp = path + "%s" % (uid) + "_imgsum_analysis" + ".png" plt.savefig(fp, dpi=fig.dpi) - bd2 = list(np.where(np.abs(data - data.mean()) > scale * data.std())[0] + good_start) + bd2 = list( + np.where(np.abs(data - data.mean()) > scale * data.std())[0] + good_start + ) if return_ylim: return np.array(bd1 + bd2 + bd3), ymin, ymax @@ -3097,7 +3201,7 @@ def get_meta_data(uid, default_dec="eiger", *argv, **kwargs): kwargs: overwrite the meta data, for example get_meta_data( uid = uid, sample = 'test') --> will overwrtie the meta's sample to test return: - meta data of the uid: a dictionay + meta data of the uid: a dictionary with keys: detector suid: the simple given uid @@ -3161,10 +3265,16 @@ def get_meta_data(uid, default_dec="eiger", *argv, **kwargs): md.update(header.start.items()) # print(header.start.time) - md["start_time"] = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(header.start["time"])) - md["stop_time"] = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(header.stop["time"])) + md["start_time"] = time.strftime( + "%Y-%m-%d %H:%M:%S", time.localtime(header.start["time"]) + ) + md["stop_time"] = time.strftime( + "%Y-%m-%d %H:%M:%S", time.localtime(header.stop["time"]) + ) try: # added: try to handle runs that don't contain image data - md["img_shape"] = header["descriptors"][0]["data_keys"][md["detector"]]["shape"][:2][::-1] + md["img_shape"] = header["descriptors"][0]["data_keys"][md["detector"]][ + "shape" + ][:2][::-1] except: if verbose: print("couldn't find image shape...skip!") @@ -3218,7 +3328,9 @@ def get_max_countc(FD, labeled_array): ) max_inten = 0 - for i in tqdm(range(FD.beg, FD.end, 1), desc="Get max intensity of ROIs in all frames"): + for i in tqdm( + range(FD.beg, FD.end, 1), desc="Get max 
intensity of ROIs in all frames" + ): try: (p, v) = FD.rdrawframe(i) w = np.where(timg[p])[0] @@ -3241,7 +3353,7 @@ def create_polygon_mask(image, xcorners, ycorners): """ - from skimage.draw import disk, line, line_aa, polygon + from skimage.draw import polygon imy, imx = image.shape bst_mask = np.zeros_like(image, dtype=bool) @@ -3264,7 +3376,7 @@ def create_rectangle_mask(image, xcorners, ycorners): """ - from skimage.draw import disk, line, line_aa, polygon + from skimage.draw import polygon imy, imx = image.shape bst_mask = np.zeros_like(image, dtype=bool) @@ -3274,7 +3386,9 @@ def create_rectangle_mask(image, xcorners, ycorners): return bst_mask -def create_multi_rotated_rectangle_mask(image, center=None, length=100, width=50, angles=[0]): +def create_multi_rotated_rectangle_mask( + image, center=None, length=100, width=50, angles=[0] +): """Developed at July 10, 2017 by Y.G.@CHX, NSLS2 Create multi rectangle-shaped mask by rotating a rectangle with a list of angles The original rectangle is defined by four corners, i.e., @@ -3316,7 +3430,9 @@ def create_multi_rotated_rectangle_mask(image, center=None, length=100, width=50 mask[rr, cc] = 1 mask_rot = np.zeros(image.shape, dtype=bool) for angle in angles: - mask_rot += np.array(rotate(mask, angle, center=center), dtype=bool) # , preserve_range=True) + mask_rot += np.array( + rotate(mask, angle, center=center), dtype=bool + ) # , preserve_range=True) return ~mask_rot @@ -3326,7 +3442,7 @@ def create_wedge(image, center, radius, wcors, acute_angle=True): wcors: [ [x1,x2,x3...], [y1,y2,y3..] 
""" - from skimage.draw import disk, line, line_aa, polygon + from skimage.draw import disk, polygon imy, imx = image.shape cy, cx = center @@ -3362,7 +3478,7 @@ def create_cross_mask( """ Give image and the beam center to create a cross-shaped mask wy_left: the width of left h-line - wy_right: the width of rigth h-line + wy_right: the width of right h-line wx_up: the width of up v-line wx_down: the width of down v-line center_disk: if True, create a disk with center and center_radius @@ -3370,7 +3486,7 @@ def create_cross_mask( Return: the cross mask """ - from skimage.draw import disk, line, line_aa, polygon + from skimage.draw import disk, polygon imy, imx = image.shape cx, cy = center @@ -3512,7 +3628,7 @@ def get_full_data_path(uid): def get_sid_filenames(header): """YG. Dev Jan, 2016 - Get a bluesky scan_id, unique_id, filename by giveing uid + Get a bluesky scan_id, unique_id, filename by giving uid Parameters ---------- @@ -3522,9 +3638,9 @@ def get_sid_filenames(header): ------- scan_id: integer unique_id: string, a full string of a uid - filename: sring + filename: string - Usuage: + Usage: sid,uid, filenames = get_sid_filenames(db[uid]) """ @@ -3539,8 +3655,10 @@ def get_sid_filenames(header): return header.start["scan_id"], header.start["uid"], filepaths -def load_data(uid, detector="eiger4m_single_image", fill=True, reverse=False, rot90=False): - """load bluesky scan data by giveing uid and detector +def load_data( + uid, detector="eiger4m_single_image", fill=True, reverse=False, rot90=False +): + """load bluesky scan data by giving uid and detector Parameters ---------- @@ -3554,7 +3672,7 @@ def load_data(uid, detector="eiger4m_single_image", fill=True, reverse=False, ro image data: a pims frames series if not success read the uid, will return image data as 0 - Usuage: + Usage: imgs = load_data( uid, detector ) md = imgs.md """ @@ -3599,7 +3717,7 @@ def load_data(uid, detector="eiger4m_single_image", fill=True, reverse=False, ro def 
mask_badpixels(mask, detector): """ - Mask known bad pixel from the giveing mask + Mask known bad pixel from the giving mask """ if detector == "eiger1m_single_image": @@ -3624,7 +3742,7 @@ def mask_badpixels(mask, detector): def load_data2(uid, detector="eiger4m_single_image"): - """load bluesky scan data by giveing uid and detector + """load bluesky scan data by giving uid and detector Parameters ---------- @@ -3636,7 +3754,7 @@ def load_data2(uid, detector="eiger4m_single_image"): image data: a pims frames series if not success read the uid, will return image data as 0 - Usuage: + Usage: imgs = load_data( uid, detector ) md = imgs.md """ @@ -3652,7 +3770,7 @@ def load_data2(uid, detector="eiger4m_single_image"): if flag: print("Can't Load Data!") - uid = "00000" # in case of failling load data + uid = "00000" # in case of failing load data imgs = 0 else: imgs = ev["data"][detector] @@ -3686,7 +3804,9 @@ def pload_obj(filename): return pickle.load(f) -def load_mask(path, mask_name, plot_=False, reverse=False, rot90=False, *argv, **kwargs): +def load_mask( + path, mask_name, plot_=False, reverse=False, rot90=False, *argv, **kwargs +): """load a mask file the mask is a numpy binary file (.npy) @@ -3694,14 +3814,14 @@ def load_mask(path, mask_name, plot_=False, reverse=False, rot90=False, *argv, * ---------- path: the path of the mask file mask_name: the name of the mask file - plot_: a boolen type + plot_: a boolean type reverse: if True, reverse the image upside down to match the "real" image geometry (should always be True in the future) Returns ------- mask: array if plot_ =True, will show the mask - Usuage: + Usage: mask = load_mask( path, mask_name, plot_ = True ) """ @@ -3716,7 +3836,9 @@ def load_mask(path, mask_name, plot_=False, reverse=False, rot90=False, *argv, * return mask -def create_hot_pixel_mask(img, threshold, center=None, center_radius=300, outer_radius=0): +def create_hot_pixel_mask( + img, threshold, center=None, center_radius=300, 
outer_radius=0 +): """create a hot pixel mask by giving threshold Input: img: the image to create hot pixel mask @@ -3898,7 +4020,9 @@ def show_img( extent=extent, ) if label_array is not None: - im2 = show_label_array(ax, label_array, alpha=alpha, cmap=cmap, interpolation=interpolation) + im2 = show_label_array( + ax, label_array, alpha=alpha, cmap=cmap, interpolation=interpolation + ) ax.set_title(image_name) if xlim is not None: @@ -3929,7 +4053,9 @@ def show_img( ax.set_aspect(aspect="auto") if show_colorbar: - cbar = fig.colorbar(im, extend="neither", spacing="proportional", orientation="vertical") + cbar = fig.colorbar( + im, extend="neither", spacing="proportional", orientation="vertical" + ) cbar.ax.tick_params(labelsize=colorbar_fontsize) fig.set_tight_layout(tight) if save: @@ -3976,7 +4102,7 @@ def plot1D( ---------- y: column-y x: column-x, by default x=None, the plot will use index of y as x-axis - the other paramaters are defined same as plt.plot + the other parameters are defined same as plt.plot Returns ------- None @@ -4092,7 +4218,9 @@ def plot1D( ### -def check_shutter_open(data_series, min_inten=0, time_edge=[0, 10], plot_=False, *argv, **kwargs): +def check_shutter_open( + data_series, min_inten=0, time_edge=[0, 10], plot_=False, *argv, **kwargs +): """Check the first frame with shutter open Parameters @@ -4104,11 +4232,13 @@ def check_shutter_open(data_series, min_inten=0, time_edge=[0, 10], plot_=False, return: shutter_open_frame: a integer, the first frame number with open shutter - Usuage: + Usage: good_start = check_shutter_open( imgsa, min_inten=5, time_edge = [0,20], plot_ = False ) """ - imgsum = np.array([np.sum(img) for img in data_series[time_edge[0] : time_edge[1] : 1]]) + imgsum = np.array( + [np.sum(img) for img in data_series[time_edge[0] : time_edge[1] : 1]] + ) if plot_: fig, ax = plt.subplots() ax.plot(imgsum, "bo") @@ -4133,13 +4263,15 @@ def get_each_frame_intensity( """Get the total intensity of each frame by sampling every 
N frames Also get bad_frame_list by check whether above bad_pixel_threshold - Usuage: + Usage: imgsum, bad_frame_list = get_each_frame_intensity(good_series ,sampling = 1000, bad_pixel_threshold=1e10, plot_ = True) """ # print ( argv, kwargs ) - imgsum = np.array([np.sum(img) for img in tqdm(data_series[::sampling], leave=True)]) + imgsum = np.array( + [np.sum(img) for img in tqdm(data_series[::sampling], leave=True)] + ) if plot_: uid = "uid" if "uid" in kwargs.keys(): @@ -4203,7 +4335,9 @@ def create_time_slice(N, slice_num, slice_width, edges=None): return np.array(time_edge) -def show_label_array(ax, label_array, cmap=None, aspect=None, interpolation="nearest", **kwargs): +def show_label_array( + ax, label_array, cmap=None, aspect=None, interpolation="nearest", **kwargs +): """ YG. Sep 26, 2017 Modified show_label_array(ax, label_array, cmap=None, **kwargs) @@ -4231,7 +4365,9 @@ def show_label_array(ax, label_array, cmap=None, aspect=None, interpolation="nea _cmap = copy.copy((mcm.get_cmap(cmap))) _cmap.set_under("w", 0) vmin = max(0.5, kwargs.pop("vmin", 0.5)) - im = ax.imshow(label_array, cmap=cmap, interpolation=interpolation, vmin=vmin, **kwargs) + im = ax.imshow( + label_array, cmap=cmap, interpolation=interpolation, vmin=vmin, **kwargs + ) if aspect is None: ax.set_aspect(aspect="auto") # ax.set_aspect('equal') @@ -4457,7 +4593,7 @@ def show_ROI_on_image( def crop_image(image, crop_mask): """Crop the non_zeros pixels of an image to a new image""" - from skimage.util import crop, pad + from skimage.util import crop pxlst = np.where(crop_mask.ravel())[0] dims = crop_mask.shape @@ -4509,7 +4645,9 @@ def get_avg_img( if "uid" in kwargs.keys(): uid = kwargs["uid"] - im = ax.imshow(avg_img, cmap="viridis", origin="lower", norm=LogNorm(vmin=0.001, vmax=1e2)) + im = ax.imshow( + avg_img, cmap="viridis", origin="lower", norm=LogNorm(vmin=0.001, vmax=1e2) + ) # ax.set_title("Masked Averaged Image") ax.set_title("uid= %s--Masked Averaged Image" % uid) 
fig.colorbar(im) @@ -4530,7 +4668,9 @@ def get_avg_img( return avg_img -def check_ROI_intensity(avg_img, ring_mask, ring_number=3, save=False, plot=True, *argv, **kwargs): +def check_ROI_intensity( + avg_img, ring_mask, ring_number=3, save=False, plot=True, *argv, **kwargs +): """plot intensity versus pixel of a ring Parameters ---------- @@ -4597,19 +4737,29 @@ def cal_g2( if num_lev is None: num_lev = int(np.log(noframes / (num_buf - 1)) / np.log(2) + 1) + 1 - print("In this g2 calculation, the buf and lev number are: %s--%s--" % (num_buf, num_lev)) + print( + "In this g2 calculation, the buf and lev number are: %s--%s--" + % (num_buf, num_lev) + ) print("%s frames will be processed..." % (noframes)) print("Bad Frames involved!") - g2, lag_steps = corr.multi_tau_auto_corr(num_lev, num_buf, ring_mask, tqdm(new_imgs)) + g2, lag_steps = corr.multi_tau_auto_corr( + num_lev, num_buf, ring_mask, tqdm(new_imgs) + ) print("G2 calculation DONE!") else: if num_lev is None: num_lev = int(np.log(noframes / (num_buf - 1)) / np.log(2) + 1) + 1 - print("In this g2 calculation, the buf and lev number are: %s--%s--" % (num_buf, num_lev)) + print( + "In this g2 calculation, the buf and lev number are: %s--%s--" + % (num_buf, num_lev) + ) print("%s frames will be processed..." % (noframes)) - g2, lag_steps = corr.multi_tau_auto_corr(num_lev, num_buf, ring_mask, tqdm(image_series)) + g2, lag_steps = corr.multi_tau_auto_corr( + num_lev, num_buf, ring_mask, tqdm(image_series) + ) print("G2 calculation DONE!") return g2, lag_steps @@ -4644,13 +4794,12 @@ def trans_data_to_pd(data, label=None, dtype="array"): convert data into pandas.DataFrame Input: data: list or np.array - label: the coloum label of the data + label: the column label of the data dtype: list or array [[NOT WORK or dict (for dict only save the scalar not arrays values)]] Output: a pandas.DataFrame """ # lists a [ list1, list2...] 
all the list have the same length - import sys import pandas as pd from numpy import arange, array @@ -4662,7 +4811,7 @@ def trans_data_to_pd(data, label=None, dtype="array"): data = array(data) N, M = data.shape else: - print("Wrong data type! Now only support 'list' and 'array' tpye") + print("Wrong data type! Now only support 'list' and 'array' type") index = arange(N) if label is None: @@ -4672,7 +4821,9 @@ def trans_data_to_pd(data, label=None, dtype="array"): return df -def save_lists(data, label=None, filename=None, path=None, return_res=False, verbose=False): +def save_lists( + data, label=None, filename=None, path=None, return_res=False, verbose=False +): """ save_lists( data, label=None, filename=None, path=None) @@ -4776,13 +4927,13 @@ def save_arrays( def cal_particle_g2(radius, viscosity, qr, taus, beta=0.2, T=298): """YG Dev Nov 20, 2017@CHX - calculate particle g2 fucntion by giving particle radius, Q , and solution viscosity using a simple + calculate particle g2 function by giving particle radius, Q , and solution viscosity using a simple exponetional model Input: radius: m qr, list, in A-1 visocity: N*s/m^2 (water at 25K = 8.9*10^(-4) ) - T: temperture, in K + T: temperature, in K e.g., for a 250 nm sphere in glycerol/water (90:10) at RT (298K) gives: 1.38064852*10**(-123)*298 / ( 6*np.pi * 0.20871 * 250 *10**(-9)) * 10**20 /1e5 = 4.18*10**5 A2/s taus: time @@ -4795,7 +4946,9 @@ def cal_particle_g2(radius, viscosity, qr, taus, beta=0.2, T=298): g2_q1 = np.zeros(len(qr), dtype=object) for i, q1 in enumerate(qr): relaxation_rate = D0 * q1**2 - g2_q1[i] = simple_exponential(taus, beta=beta, relaxation_rate=relaxation_rate, baseline=1) + g2_q1[i] = simple_exponential( + taus, beta=beta, relaxation_rate=relaxation_rate, baseline=1 + ) return g2_q1 @@ -4915,7 +5068,7 @@ def ring_edges(inner_radius, width, spacing=0, num_rings=None): spacing_is_list = isinstance(spacing, collections.Iterable) if width_is_list and spacing_is_list: if len(width) != 
len(spacing) + 1: - raise ValueError("List of spacings must be one less than list " "of widths.") + raise ValueError("List of spacings must be one less than list of widths.") if num_rings is None: try: num_rings = len(width) @@ -4935,7 +5088,7 @@ def ring_edges(inner_radius, width, spacing=0, num_rings=None): if spacing_is_list: if num_rings - 1 != len(spacing): raise ValueError("num_rings does not match spacing list") - # Now regularlize the input. + # Now regularize the input. if not width_is_list: width = np.ones(num_rings) * width @@ -5010,13 +5163,12 @@ def trans_tf_to_td(tf, dtype="dframe"): import datetime import numpy as np - import pandas as pd """translate time.float to time.date, td.type dframe: a dataframe td.type list, a list """ - if dtype is "dframe": + if dtype == "dframe": ind = tf.index else: ind = range(len(tf)) @@ -5037,7 +5189,7 @@ def trans_td_to_tf(td, dtype="dframe"): td.type dframe: a dataframe td.type list, a list """ - if dtype is "dframe": + if dtype == "dframe": ind = td.index else: ind = range(len(td)) @@ -5121,7 +5273,9 @@ def get_averaged_data_from_multi_res( if D != 3: keystr_average[sk[i] : sk[i + 1]] /= avg_count[sk[i + 1]] else: - keystr_average[sk[i] : sk[i + 1], sk[i] : sk[i + 1], :] /= avg_count[sk[i + 1]] + keystr_average[sk[i] : sk[i + 1], sk[i] : sk[i + 1], :] /= avg_count[ + sk[i + 1] + ] return keystr_average @@ -5160,7 +5314,9 @@ def save_g2_general(g2, taus, qr=None, qz=None, uid="uid", path=None, return_res # filename += '-uid=%s.csv' % (uid) filename1 = os.path.join(path, filename) df.to_csv(filename1) - print("The correlation function is saved in %s with filename as %s" % (path, filename)) + print( + "The correlation function is saved in %s with filename as %s" % (path, filename) + ) if return_res: return df @@ -5179,17 +5335,35 @@ def simple_exponential(x, beta, relaxation_rate, baseline=1): def simple_exponential_with_vibration(x, beta, relaxation_rate, freq, amp, baseline=1): - return beta * (1 + amp * np.cos(2 * 
np.pi * freq * x)) * np.exp(-2 * relaxation_rate * x) + baseline + return ( + beta + * (1 + amp * np.cos(2 * np.pi * freq * x)) + * np.exp(-2 * relaxation_rate * x) + + baseline + ) -def stretched_auto_corr_scat_factor_with_vibration(x, beta, relaxation_rate, alpha, freq, amp, baseline=1): - return beta * (1 + amp * np.cos(2 * np.pi * freq * x)) * np.exp(-2 * (relaxation_rate * x) ** alpha) + baseline +def stretched_auto_corr_scat_factor_with_vibration( + x, beta, relaxation_rate, alpha, freq, amp, baseline=1 +): + return ( + beta + * (1 + amp * np.cos(2 * np.pi * freq * x)) + * np.exp(-2 * (relaxation_rate * x) ** alpha) + + baseline + ) -def flow_para_function_with_vibration(x, beta, relaxation_rate, flow_velocity, freq, amp, baseline=1): +def flow_para_function_with_vibration( + x, beta, relaxation_rate, flow_velocity, freq, amp, baseline=1 +): vibration_part = 1 + amp * np.cos(2 * np.pi * freq * x) Diff_part = np.exp(-2 * relaxation_rate * x) - Flow_part = np.pi**2 / (16 * x * flow_velocity) * abs(erf(np.sqrt(4 / np.pi * 1j * x * flow_velocity))) ** 2 + Flow_part = ( + np.pi**2 + / (16 * x * flow_velocity) + * abs(erf(np.sqrt(4 / np.pi * 1j * x * flow_velocity))) ** 2 + ) return beta * vibration_part * Diff_part * Flow_part + baseline @@ -5197,11 +5371,17 @@ def flow_para_function(x, beta, relaxation_rate, flow_velocity, baseline=1): """flow_velocity: q.v (q vector dot v vector = q*v*cos(angle) )""" Diff_part = np.exp(-2 * relaxation_rate * x) - Flow_part = np.pi**2 / (16 * x * flow_velocity) * abs(erf(np.sqrt(4 / np.pi * 1j * x * flow_velocity))) ** 2 + Flow_part = ( + np.pi**2 + / (16 * x * flow_velocity) + * abs(erf(np.sqrt(4 / np.pi * 1j * x * flow_velocity))) ** 2 + ) return beta * Diff_part * Flow_part + baseline -def flow_para_function_explicitq(x, beta, diffusion, flow_velocity, alpha=1, baseline=1, qr=1, q_ang=0): +def flow_para_function_explicitq( + x, beta, diffusion, flow_velocity, alpha=1, baseline=1, qr=1, q_ang=0 +): """Nov 9, 2017 Basically, 
make q vector to (qr, angle), ###relaxation_rate is actually a diffusion rate flow_velocity: q.v (q vector dot v vector = q*v*cos(angle) ) @@ -5216,7 +5396,14 @@ def flow_para_function_explicitq(x, beta, diffusion, flow_velocity, alpha=1, bas Flow_part = ( np.pi**2 / (16 * x * flow_velocity * qr * abs(np.cos(q_ang))) - * abs(erf(np.sqrt(4 / np.pi * 1j * x * flow_velocity * qr * abs(np.cos(q_ang))))) ** 2 + * abs( + erf( + np.sqrt( + 4 / np.pi * 1j * x * flow_velocity * qr * abs(np.cos(q_ang)) + ) + ) + ) + ** 2 ) else: Flow_part = 1 @@ -5229,12 +5416,18 @@ def get_flow_velocity(average_velocity, shape_factor): return average_velocity * (1 - shape_factor) / (1 + shape_factor) -def stretched_flow_para_function(x, beta, relaxation_rate, alpha, flow_velocity, baseline=1): +def stretched_flow_para_function( + x, beta, relaxation_rate, alpha, flow_velocity, baseline=1 +): """ flow_velocity: q.v (q vector dot v vector = q*v*cos(angle) ) """ Diff_part = np.exp(-2 * (relaxation_rate * x) ** alpha) - Flow_part = np.pi**2 / (16 * x * flow_velocity) * abs(erf(np.sqrt(4 / np.pi * 1j * x * flow_velocity))) ** 2 + Flow_part = ( + np.pi**2 + / (16 * x * flow_velocity) + * abs(erf(np.sqrt(4 / np.pi * 1j * x * flow_velocity))) ** 2 + ) return beta * Diff_part * Flow_part + baseline @@ -5252,10 +5445,14 @@ def get_g2_fit_general_two_steps( i) Using the "function" to fit whole g2 to get baseline and beta (contrast) ii) Then using the obtained baseline and beta to fit g2 in a "second_fit_range" by using simple_exponential function """ - g2_fit_result, taus_fit, g2_fit = get_g2_fit_general(g2, taus, function, sequential_fit, *argv, **kwargs) + g2_fit_result, taus_fit, g2_fit = get_g2_fit_general( + g2, taus, function, sequential_fit, *argv, **kwargs + ) guess_values = {} for k in list(g2_fit_result[0].params.keys()): - guess_values[k] = np.array([g2_fit_result[i].params[k].value for i in range(g2.shape[1])]) + guess_values[k] = np.array( + [g2_fit_result[i].params[k].value for i in 
range(g2.shape[1])] + ) if "guess_limits" in kwargs: guess_limits = kwargs["guess_limits"] @@ -5315,9 +5512,9 @@ def get_g2_fit_general( supported function include: 'simple_exponential' (or 'simple'): fit by a simple exponential function, defined as beta * np.exp(-2 * relaxation_rate * lags) + baseline - 'streched_exponential'(or 'streched'): fit by a streched exponential function, defined as + 'stretched_exponential'(or 'stretched'): fit by a stretched exponential function, defined as beta * ( np.exp( -2 * ( relaxation_rate * tau )**alpha ) + baseline - 'stretched_vibration': fit by a streched exponential function with vibration, defined as + 'stretched_vibration': fit by a stretched exponential function with vibration, defined as beta * (1 + amp*np.cos( 2*np.pi*60* x) )* np.exp(-2 * (relaxation_rate * x)**alpha) + baseline 'flow_para_function' (or flow): fit by a flow function @@ -5329,7 +5526,7 @@ def get_g2_fit_general( beta, relaxation_rate , alpha ,baseline values: a False or True, False for not vary 'guess_values': a dict, for initial value of the fitting para, - the defalut values are + the default values are dict( beta=.1, alpha=1.0, relaxation_rate =0.005, baseline=1.0) 'guess_limits': a dict, for the limits of the fittting para, for example: @@ -5338,7 +5535,7 @@ def get_g2_fit_general( dict( baseline =[0.5, 2.5], alpha=[0, inf] ,beta = [0, 1], relaxation_rate= [0.0,1000] ) Returns ------- - fit resutls: a instance in limfit + fit results: a instance in limfit tau_fit fit_data by the model, it has the q number of g2 @@ -5367,16 +5564,22 @@ def get_g2_fit_general( _vars = [] if function == "simple_exponential" or function == "simple": _vars = np.unique(_vars + ["alpha"]) - mod = Model(stretched_auto_corr_scat_factor) # , independent_vars= list( _vars) ) + mod = Model( + stretched_auto_corr_scat_factor + ) # , independent_vars= list( _vars) ) elif function == "stretched_exponential" or function == "stretched": mod = Model(stretched_auto_corr_scat_factor) # 
, independent_vars= _vars) elif function == "stretched_vibration": - mod = Model(stretched_auto_corr_scat_factor_with_vibration) # , independent_vars= _vars) + mod = Model( + stretched_auto_corr_scat_factor_with_vibration + ) # , independent_vars= _vars) elif function == "flow_para_function" or function == "flow_para": mod = Model(flow_para_function) # , independent_vars= _vars) elif function == "flow_para_function_explicitq" or function == "flow_para_qang": mod = Model(flow_para_function_explicitq) # , independent_vars= _vars) - elif function == "flow_para_function_with_vibration" or function == "flow_vibration": + elif ( + function == "flow_para_function_with_vibration" or function == "flow_vibration" + ): mod = Model(flow_para_function_with_vibration) else: @@ -5397,7 +5600,11 @@ def get_g2_fit_general( for k in list(guess_limits.keys()): mod.set_param_hint(k, min=guess_limits[k][0], max=guess_limits[k][1]) - if function == "flow_para_function" or function == "flow_para" or function == "flow_vibration": + if ( + function == "flow_para_function" + or function == "flow_para" + or function == "flow_vibration" + ): mod.set_param_hint("flow_velocity", min=0) if function == "flow_para_function_explicitq" or function == "flow_para_qang": mod.set_param_hint("flow_velocity", min=0) @@ -5545,7 +5752,9 @@ def get_g2_fit_general( # pars[k].value = _guess_val[k][i] if function == "flow_para_function_explicitq" or function == "flow_para_qang": if qval_dict is None: - print("Please provide qval_dict, a dict with qr and ang (in unit of degrees).") + print( + "Please provide qval_dict, a dict with qr and ang (in unit of degrees)." 
+ ) else: pars = mod.make_params( beta=_beta_, @@ -5722,14 +5931,14 @@ def plot_g2_general( function: 'simple_exponential': fit by a simple exponential function, defined as beta * np.exp(-2 * relaxation_rate * lags) + baseline - 'streched_exponential': fit by a streched exponential function, defined as + 'stretched_exponential': fit by a stretched exponential function, defined as beta * (np.exp(-2 * relaxation_rate * lags))**alpha + baseline geometry: 'saxs': a saxs with Qr partition 'ang_saxs': a saxs with Qr and angular partition 'gi_saxs': gisaxs with Qz, Qr - one_plot: if True, plot all images in one pannel + one_plot: if True, plot all images in one panel kwargs: Returns @@ -5827,12 +6036,16 @@ def plot_g2_general( if geometry == "ang_saxs": title_short = "Angle= %.2f" % (short_ulabel[s_ind]) + r"$^\circ$" elif geometry == "gi_saxs": - title_short = r"$Q_z= $" + "%.4f" % (short_ulabel[s_ind]) + r"$\AA^{-1}$" + title_short = ( + r"$Q_z= $" + "%.4f" % (short_ulabel[s_ind]) + r"$\AA^{-1}$" + ) else: title_short = "" else: # qr if geometry == "ang_saxs" or geometry == "gi_saxs": - title_short = r"$Q_r= $" + "%.5f " % (short_ulabel[s_ind]) + r"$\AA^{-1}$" + title_short = ( + r"$Q_r= $" + "%.5f " % (short_ulabel[s_ind]) + r"$\AA^{-1}$" + ) else: title_short = "" # print(geometry) @@ -5880,22 +6093,30 @@ def plot_g2_general( # ax = fig[fig_subnum].add_subplot(sx,sy, i + 1 - fig_subnum*max_plotnum_fig) fig_subnum = i // max_plotnum_fig # print( i, sx,sy, fig_subnum, max_plotnum_fig, i + 1 - fig_subnum*max_plotnum_fig ) - ax = fig[fig_subnum].add_subplot(sx, sy, i + 1 - fig_subnum * max_plotnum_fig) + ax = fig[fig_subnum].add_subplot( + sx, sy, i + 1 - fig_subnum * max_plotnum_fig + ) ax.set_ylabel(r"$%s$" % ylabel + "(" + r"$\tau$" + ")") ax.set_xlabel(r"$\tau $ $(s)$", fontsize=16) if master_plot == "qz" or master_plot == "angle": if geometry != "gi_waxs": - title_long = r"$Q_r= $" + "%.5f " % (long_label[l_ind]) + r"$\AA^{-1}$" + title_long = ( + r"$Q_r= $" + "%.5f 
" % (long_label[l_ind]) + r"$\AA^{-1}$" + ) else: title_long = r"$Q_r= $" + "%i " % (long_label[l_ind]) # print( title_long,long_label,l_ind ) else: if geometry == "ang_saxs": # title_long = 'Ang= ' + '%.2f'%( long_label[l_ind] ) + r'$^\circ$' + '( %d )'%(l_ind) - title_long = "Ang= " + "%.2f" % (long_label[l_ind]) # + r'$^\circ$' + '( %d )'%(l_ind) + title_long = ( + "Ang= " + "%.2f" % (long_label[l_ind]) + ) # + r'$^\circ$' + '( %d )'%(l_ind) elif geometry == "gi_saxs": - title_long = r"$Q_z= $" + "%.5f " % (long_label[l_ind]) + r"$\AA^{-1}$" + title_long = ( + r"$Q_z= $" + "%.5f " % (long_label[l_ind]) + r"$\AA^{-1}$" + ) else: title_long = "" # print( master_plot ) @@ -5912,7 +6133,9 @@ def plot_g2_general( if qth_interest is not None: # it might have a bug here, todolist!!! lab = sorted(list(qval_dict_.keys())) # print( lab, l_ind) - ax.set_title(title_long + " (%s )" % (lab[l_ind] + 1), y=1.05, fontsize=12) + ax.set_title( + title_long + " (%s )" % (lab[l_ind] + 1), y=1.05, fontsize=12 + ) for ki, k in enumerate(list(g2_dict_.keys())): if ki == 0: c = "b" @@ -5967,7 +6190,9 @@ def plot_g2_general( else: yerr = g2_err_dict[k][nlst][:, l_ind] if g2_labels is None: - ax.errorbar(x, y, yerr=yerr, fmt=m, color=c, markersize=6) + ax.errorbar( + x, y, yerr=yerr, fmt=m, color=c, markersize=6 + ) else: if nlst == 0: ax.errorbar( @@ -5980,7 +6205,9 @@ def plot_g2_general( label=g2_labels[ki], ) else: - ax.errorbar(x, y, yerr=yerr, fmt=m, color=c, markersize=6) + ax.errorbar( + x, y, yerr=yerr, fmt=m, color=c, markersize=6 + ) ax.set_xscale("log", nonposx="clip") if nlst == 0: if l_ind == 0: @@ -6000,7 +6227,9 @@ def plot_g2_general( if g2_labels is None: ax.semilogx(x, y, m, color=c, markersize=6) else: - ax.semilogx(x, y, m, color=c, markersize=6, label=g2_labels[ki]) + ax.semilogx( + x, y, m, color=c, markersize=6, label=g2_labels[ki] + ) else: yerr = g2_err_dict[k][:, l_ind] # print(x.shape, y.shape, yerr.shape) @@ -6040,17 +6269,26 @@ def plot_g2_general( elif 
function == "flow_vibration": rate = result1.best_values["relaxation_rate"] freq = result1.best_values["freq"] - if function == "flow_para_function" or function == "flow_para" or function == "flow_vibration": + if ( + function == "flow_para_function" + or function == "flow_para" + or function == "flow_vibration" + ): rate = result1.best_values["relaxation_rate"] flow = result1.best_values["flow_velocity"] - if function == "flow_para_function_explicitq" or function == "flow_para_qang": + if ( + function == "flow_para_function_explicitq" + or function == "flow_para_qang" + ): diff = result1.best_values["diffusion"] qrr = short_ulabel[s_ind] # print(qrr) rate = diff * qrr**2 flow = result1.best_values["flow_velocity"] if qval_dict_ is None: - print("Please provide qval_dict, a dict with qr and ang (in unit of degrees).") + print( + "Please provide qval_dict, a dict with qr and ang (in unit of degrees)." + ) else: pass @@ -6083,7 +6321,9 @@ def plot_g2_general( txts = r"$baseline$" + r"$ = %.3f$" % (baseline) dt += 0.1 - ax.text(x=x, y=y0 - dt, s=txts, fontsize=fontsize, transform=ax.transAxes) + ax.text( + x=x, y=y0 - dt, s=txts, fontsize=fontsize, transform=ax.transAxes + ) if ( function == "flow_para_function" @@ -6113,7 +6353,9 @@ def plot_g2_general( txts = r"$\beta$" + r"$ = %.3f$" % (beta) dt += 0.1 - ax.text(x=x, y=y0 - dt, s=txts, fontsize=fontsize, transform=ax.transAxes) + ax.text( + x=x, y=y0 - dt, s=txts, fontsize=fontsize, transform=ax.transAxes + ) if "ylim" in kwargs: ax.set_ylim(kwargs["ylim"]) @@ -6132,7 +6374,7 @@ def plot_g2_general( else: fp = path + filename + "_%s_%s" % (mastp, s_ind) - if append_name is not "": + if append_name != "": fp = fp + append_name fps.append(fp + ".png") # if num_long_i <= 16: @@ -6150,7 +6392,7 @@ def plot_g2_general( for fn, f in enumerate(fig): f.set_tight_layout(True) fp = path + filename + "_q_%s_%s" % (fn * 16, (fn + 1) * 16) - if append_name is not "": + if append_name != "": fp = fp + append_name fps.append(fp + 
".png") f.savefig(fp + ".png", dpi=f.dpi) @@ -6159,7 +6401,7 @@ def plot_g2_general( if (num_short != 1) or (num_long_i > 16): outputfile = path + filename + ".png" - if append_name is not "": + if append_name != "": outputfile = path + filename + append_name + "__joint.png" else: outputfile = path + filename + "__joint.png" @@ -6172,7 +6414,9 @@ def power_func(x, D0, power=2): return D0 * x**power -def get_q_rate_fit_general(qval_dict, rate, geometry="saxs", weights=None, *argv, **kwargs): +def get_q_rate_fit_general( + qval_dict, rate, geometry="saxs", weights=None, *argv, **kwargs +): """ Dec 26,2016, Y.G.@CHX @@ -6337,7 +6581,9 @@ def plot_q_rate_fit_general( if show_text: txts = r"$D0: %.3e$" % D0 + r" $A^2$" + r"$s^{-1}$" dy = 0.1 - ax.text(x=0.15, y=0.65 - dy * i, s=txts, fontsize=14, transform=ax.transAxes) + ax.text( + x=0.15, y=0.65 - dy * i, s=txts, fontsize=14, transform=ax.transAxes + ) if Nqz != 1: legend = ax.legend(loc="best") diff --git a/pyCHX/v2/_commonspeckle/chx_handlers.py b/pyCHX/v2/_commonspeckle/chx_handlers.py index 998ce9c..3ca8aa1 100644 --- a/pyCHX/v2/_commonspeckle/chx_handlers.py +++ b/pyCHX/v2/_commonspeckle/chx_handlers.py @@ -5,7 +5,6 @@ # handler registration and database instantiation should be done # here and only here! 
from databroker import Broker -from databroker.assets.handlers_base import HandlerBase from eiger_io.fs_handler import EigerHandler as EigerHandlerPIMS from eiger_io.fs_handler import EigerImages as EigerImagesPIMS diff --git a/pyCHX/v2/_commonspeckle/chx_libs.py b/pyCHX/v2/_commonspeckle/chx_libs.py index ba0fef5..c945cfe 100644 --- a/pyCHX/v2/_commonspeckle/chx_libs.py +++ b/pyCHX/v2/_commonspeckle/chx_libs.py @@ -3,27 +3,12 @@ yuzhang@bnl.gov This module is for the necessary packages for the XPCS analysis """ -import collections -import copy -import getpass + import itertools -import os -import pickle -import random -import sys -import time -import warnings -from datetime import datetime -import h5py import matplotlib as mpl -import matplotlib.cm as mcm import matplotlib.pyplot as plt import numpy as np -import pims -import skbeam.core.correlation as corr -import skbeam.core.roi as roi -import skbeam.core.utils as utils # from modest_image import imshow #common # edit handlers here to switch to PIMS or dask @@ -38,21 +23,6 @@ # - https://github.com/scikit-beam/scikit-beam # * xray-vision - plotting helper functions for X-ray science # - https://github.com/Nikea/xray-vision -import xray_vision -import xray_vision.mpl_plotting as mpl_plot -from IPython.core.magics.display import Javascript -from lmfit import Model, Parameter, Parameters, minimize, report_fit -from matplotlib import gridspec -from matplotlib.colors import LogNorm -from matplotlib.figure import Figure -from mpl_toolkits.axes_grid1 import make_axes_locatable -from pandas import DataFrame -from PIL import Image -from skbeam.core.utils import multi_tau_lags -from skimage.draw import disk, ellipse, line, line_aa, polygon -from tqdm import tqdm -from xray_vision.mask.manual_mask import ManualMask -from xray_vision.mpl_plotting import speckle mcolors = itertools.cycle( [ @@ -400,7 +370,9 @@ [1, 0, 0], [0.5, 0.0, 0.0], ] -cmap_jet_extended = mpl.colors.LinearSegmentedColormap.from_list("cmap_jet_extended", 
color_list_jet_extended) +cmap_jet_extended = mpl.colors.LinearSegmentedColormap.from_list( + "cmap_jet_extended", color_list_jet_extended +) # Tweaked version of "view.gtk" default color scale color_list_vge = [ @@ -423,9 +395,11 @@ [254.0 / 255.0, 254.0 / 255.0, 0.0 / 255.0], [254.0 / 255.0, 254.0 / 255.0, 254.0 / 255.0], ] -cmap_vge_hdr = mpl.colors.LinearSegmentedColormap.from_list("cmap_vge_hdr", color_list_vge_hdr) +cmap_vge_hdr = mpl.colors.LinearSegmentedColormap.from_list( + "cmap_vge_hdr", color_list_vge_hdr +) -# Simliar to Dectris ALBULA default color-scale +# Similar to Dectris ALBULA default color-scale color_list_hdr_albula = [ [255.0 / 255.0, 255.0 / 255.0, 255.0 / 255.0], [0.0 / 255.0, 0.0 / 255.0, 0.0 / 255.0], @@ -433,9 +407,13 @@ [255.0 / 255.0, 255.0 / 255.0, 0.0 / 255.0], # [ 255.0/255.0, 255.0/255.0, 255.0/255.0], ] -cmap_hdr_albula = mpl.colors.LinearSegmentedColormap.from_list("cmap_hdr_albula", color_list_hdr_albula) +cmap_hdr_albula = mpl.colors.LinearSegmentedColormap.from_list( + "cmap_hdr_albula", color_list_hdr_albula +) cmap_albula = cmap_hdr_albula -cmap_albula_r = mpl.colors.LinearSegmentedColormap.from_list("cmap_hdr_r", color_list_hdr_albula[::-1]) +cmap_albula_r = mpl.colors.LinearSegmentedColormap.from_list( + "cmap_hdr_r", color_list_hdr_albula[::-1] +) # Ugly color-scale, but good for highlighting many features in HDR data color_list_cur_hdr_goldish = [ @@ -449,4 +427,6 @@ [200.0 / 255.0, 0.0 / 255.0, 0.0 / 255.0], # red [255.0 / 255.0, 255.0 / 255.0, 255.0 / 255.0], # white ] -cmap_hdr_goldish = mpl.colors.LinearSegmentedColormap.from_list("cmap_hdr_goldish", color_list_cur_hdr_goldish) +cmap_hdr_goldish = mpl.colors.LinearSegmentedColormap.from_list( + "cmap_hdr_goldish", color_list_cur_hdr_goldish +) diff --git a/pyCHX/v2/_commonspeckle/chx_olog.py b/pyCHX/v2/_commonspeckle/chx_olog.py index 880c9f4..8e39c47 100644 --- a/pyCHX/v2/_commonspeckle/chx_olog.py +++ b/pyCHX/v2/_commonspeckle/chx_olog.py @@ -110,12 +110,15 @@ def 
update_olog_id(logid, text, attachments, verbose=True): ) client.updateLog(logid, upd) if verbose: - print(f"The url={url} was successfully updated with {text} and with " f"the attachments") + print( + f"The url={url} was successfully updated with {text} and with " + f"the attachments" + ) def update_olog_uid(uid, text, attachments): """ - Update olog book logid entry cotaining uid string with text and attachments + Update olog book logid entry containing uid string with text and attachments files. Parameters diff --git a/pyCHX/v2/_commonspeckle/chx_speckle.py b/pyCHX/v2/_commonspeckle/chx_speckle.py index 75ab068..134913b 100644 --- a/pyCHX/v2/_commonspeckle/chx_speckle.py +++ b/pyCHX/v2/_commonspeckle/chx_speckle.py @@ -10,7 +10,6 @@ import logging import time -import six from skbeam.core import roi from skbeam.core.utils import bin_edges_to_centers, geometric_series @@ -19,13 +18,10 @@ import sys from datetime import datetime -import matplotlib as mpl import matplotlib.pyplot as plt import numpy as np -import scipy as sp import scipy.stats as st -from matplotlib.colors import LogNorm -from scipy.optimize import leastsq, minimize +from scipy.optimize import leastsq def xsvs( @@ -84,8 +80,8 @@ def xsvs( C. Carona and A. Fluerasu , "Photon statistics and speckle visibility spectroscopy with partially coherent x-rays" J. Synchrotron Rad., vol 21, p 1288-1295, 2014. - .. [2] R. Bandyopadhyay, A. S. Gittings, S. S. Suh, P.K. Dixon and - D.J. Durian "Speckle-visibilty Spectroscopy: A tool to study + .. [2] R. Bandyopadhyay, A. S. Gittings, S. S. Such, P.K. Dixon and + D.J. Durian "Speckle-visibility Spectroscopy: A tool to study time-varying dynamics" Rev. Sci. Instrum. vol 76, p 093110, 2005. 
There is an example in https://github.com/scikit-xray/scikit-xray-examples It will demonstrate the use of these functions in this module for @@ -266,7 +262,7 @@ def xsvs( prob_k_all[i, j] = np.array([0] * (len(bin_edges[i]) - 1)) prob_k_std_dev[i, j] = np.array([0] * (len(bin_edges[i]) - 1)) - logger.info("Processing time for XSVS took %s seconds." "", (time.time() - start_time)) + logger.info("Processing time for XSVS took %s seconds.", (time.time() - start_time)) elapsed_time = time.time() - start_time # print (Num) print("Total time: %.2f min" % (elapsed_time / 60.0)) @@ -332,7 +328,9 @@ def _process( roi_data = data[labels == label] spe_hist, bin_edges = np.histogram(roi_data, bins=bin_edges, density=True) spe_hist = np.nan_to_num(spe_hist) - prob_k[level, j] += (spe_hist - prob_k[level, j]) / (img_per_level[level] - track_bad_level[level]) + prob_k[level, j] += (spe_hist - prob_k[level, j]) / ( + img_per_level[level] - track_bad_level[level] + ) prob_k_pow[level, j] += (np.power(spe_hist, 2) - prob_k_pow[level, j]) / ( img_per_level[level] - track_bad_level[level] @@ -417,7 +415,6 @@ def get_bin_edges(num_times, num_rois, mean_roi, max_cts): ##for fit ################### -from scipy import stats from scipy.special import gamma, gammaln @@ -425,8 +422,8 @@ def gammaDist(x, params): """Gamma distribution function M,K = params, where K is average photon counts , M is the number of coherent modes, - In case of high intensity, the beam behavors like wave and - the probability density of photon, P(x), satify this gamma function. + In case of high intensity, the beam behaviors like wave and + the probability density of photon, P(x), satisfy this gamma function. """ K, M = params @@ -509,8 +506,8 @@ def nbinom_dist(bin_values, K, M): def poisson(x, K): """Poisson distribution function. K is average photon counts - In case of low intensity, the beam behavors like particle and - the probability density of photon, P(x), satify this poisson function. 
+ In case of low intensity, the beam behaviors like particle and + the probability density of photon, P(x), satisfy this poisson function. """ K = float(K) Pk = np.exp(-K) * power(K, x) / gamma(x + 1) @@ -576,9 +573,9 @@ def diff_mot_con_factor(times, relaxation_rate, contrast_factor, cf_baseline=0): negative_binom_distribution() function Notes """ - co_eff = (np.exp(-2 * relaxation_rate * times) - 1 + 2 * relaxation_rate * times) / ( - 2 * (relaxation_rate * times) ** 2 - ) + co_eff = ( + np.exp(-2 * relaxation_rate * times) - 1 + 2 * relaxation_rate * times + ) / (2 * (relaxation_rate * times) ** 2) return contrast_factor * co_eff + cf_baseline @@ -600,7 +597,7 @@ def plot_sxvs( xlim=[0, 3.5], time_steps=None, ): - """a convinent function to plot sxvs results""" + """a convenient function to plot sxvs results""" num_rings = spe_cts_all.shape[1] num_times = Knorm_bin_edges.shape[0] sx = int(round(np.sqrt(num_rings))) @@ -646,7 +643,7 @@ def fit_xsvs1( ylim=None, time_steps=None, ): - """a convinent function to plot sxvs results + """a convenient function to plot sxvs results supporting fit function include: 'bn': Negative Binomaial Distribution 'gm': Gamma Distribution @@ -654,18 +651,17 @@ def fit_xsvs1( """ from lmfit import Model - from scipy.interpolate import UnivariateSpline if func == "bn": mod = Model(nbinom_dist) elif func == "gm": - mod = Model(gamma_dist, indepdent_vars=["K"]) + mod = Model(gamma_dist, independent_vars=["K"]) elif func == "ps": mod = Model(poisson_dist) else: print("the current supporting function include 'bn', 'gm','ps'") - # g_mod = Model(gamma_dist, indepdent_vars=['K']) + # g_mod = Model(gamma_dist, independent_vars=['K']) # g_mod = Model( gamma_dist ) # n_mod = Model(nbinom_dist) # p_mod = Model(poisson_dist) @@ -745,9 +741,13 @@ def fit_xsvs1( fitx_ = np.linspace(0, max(Knorm_bin_edges[j, i][:-1]), 1000) fitx = np.linspace(0, max(bin_edges[j, i][:-1]), 1000) if func == "bn": - fity = nbinom_dist(fitx, K_val[i][j], M_val[i][j]) # M 
and K are fitted best values + fity = nbinom_dist( + fitx, K_val[i][j], M_val[i][j] + ) # M and K are fitted best values label = "nbinom" - txt = "K=" + "%.3f" % (K_val[i][0]) + "," + "M=" + "%.3f" % (M_val[i][0]) + txt = ( + "K=" + "%.3f" % (K_val[i][0]) + "," + "M=" + "%.3f" % (M_val[i][0]) + ) elif func == "gm": fity = gamma_dist(fitx, K_mean[i] * 2**j, M_val[i][j]) label = "gamma" @@ -1154,7 +1154,9 @@ def get_max_countc(FD, labeled_array): ) max_inten = 0 - for i in tqdm(range(FD.beg, FD.end, 1), desc="Get max intensity of ROIs in all frames"): + for i in tqdm( + range(FD.beg, FD.end, 1), desc="Get max intensity of ROIs in all frames" + ): (p, v) = FD.rdrawframe(i) w = np.where(timg[p])[0] @@ -1197,7 +1199,9 @@ def plot_g2_contrast( # fig = plt.figure(figsize=(14, 10)) fig = plt.figure() - plt.title("uid= %s_" % uid + "Contrast Factor for Each Q Rings", fontsize=14, y=1.08) + plt.title( + "uid= %s_" % uid + "Contrast Factor for Each Q Rings", fontsize=14, y=1.08 + ) if qth is None: plt.axis("off") n = 1 diff --git a/pyCHX/v2/_commonspeckle/chx_specklecp.py b/pyCHX/v2/_commonspeckle/chx_specklecp.py index 771e51f..4060a07 100644 --- a/pyCHX/v2/_commonspeckle/chx_specklecp.py +++ b/pyCHX/v2/_commonspeckle/chx_specklecp.py @@ -8,36 +8,25 @@ from __future__ import absolute_import, division, print_function import logging -import time -import six from skbeam.core import roi from skbeam.core.utils import bin_edges_to_centers, geometric_series logger = logging.getLogger(__name__) -import itertools import os -import sys from datetime import datetime from multiprocessing import Pool -import dill -import matplotlib as mpl import matplotlib.pyplot as plt import numpy as np -import scipy as sp import scipy.stats as st -from matplotlib.colors import LogNorm -from scipy.optimize import leastsq, minimize +from scipy.optimize import leastsq from tqdm import tqdm from pyCHX.v2._commonspeckle.chx_compress import ( # common apply_async, - go_through_FD, - map_async, pass_FD, - 
run_dill_encoded, ) from pyCHX.v2._commonspeckle.chx_generic_functions import trans_data_to_pd # common @@ -157,14 +146,18 @@ def xsvsp_single( number_of_img = noframes for i in range(FD.beg, FD.end): pass_FD(FD, i) - label_arrays = [np.array(label_array == i, dtype=np.int64) for i in np.unique(label_array)[1:]] + label_arrays = [ + np.array(label_array == i, dtype=np.int64) for i in np.unique(label_array)[1:] + ] qind, pixelist = roi.extract_label_indices(label_array) if norm is not None: norms = [ norm[ np.in1d( pixelist, - extract_label_indices(np.array(label_array == i, dtype=np.int64))[1], + extract_label_indices(np.array(label_array == i, dtype=np.int64))[ + 1 + ], ) ] for i in np.unique(label_array)[1:] @@ -360,7 +353,7 @@ def xsvsc_single( norm=None, progress_bar=True, ): - """YG MOD@Octo 12, 2017, Change photon statistic error bar from sampling statistic bar to error bar with phisical meaning, + """YG MOD@Octo 12, 2017, Change photon statistic error bar from sampling statistic bar to error bar with physical meaning, photon_number@one_particular_count = photon_tolal_number * photon_distribution@one_particular_count +/- sqrt( photon_number@one_particular_count ) @@ -409,8 +402,8 @@ def xsvsc_single( C. Carona and A. Fluerasu , "Photon statistics and speckle visibility spectroscopy with partially coherent x-rays" J. Synchrotron Rad., vol 21, p 1288-1295, 2014. - .. [2] R. Bandyopadhyay, A. S. Gittings, S. S. Suh, P.K. Dixon and - D.J. Durian "Speckle-visibilty Spectroscopy: A tool to study + .. [2] R. Bandyopadhyay, A. S. Gittings, S. S. Such, P.K. Dixon and + D.J. Durian "Speckle-visibility Spectroscopy: A tool to study time-varying dynamics" Rev. Sci. Instrum. vol 76, p 093110, 2005. 
There is an example in https://github.com/scikit-xray/scikit-xray-examples It will demonstrate the use of these functions in this module for @@ -702,7 +695,10 @@ def get_his_std_qi(data_pixel_qi, max_cts=None): bins = np.arange(max_cts) dqn, dqm = data_pixel_qi.shape # get histogram here - H = np.apply_along_axis(np.bincount, 1, np.int_(data_pixel_qi), minlength=max_cts) / dqm + H = ( + np.apply_along_axis(np.bincount, 1, np.int_(data_pixel_qi), minlength=max_cts) + / dqm + ) # do average for different frame his = np.average(H, axis=0) std = np.std(H, axis=0) @@ -733,7 +729,9 @@ def get_his_std(data_pixel, rois, max_cts=None): for qi in range(noqs): pixelist_qi = np.where(qind == qi + 1)[0] # print(qi, max_cts) - bins, his[qi], std[qi], kmean[qi] = get_his_std_qi(data_pixel[:, pixelist_qi], max_cts) + bins, his[qi], std[qi], kmean[qi] = get_his_std_qi( + data_pixel[:, pixelist_qi], max_cts + ) return bins, his, std, kmean @@ -807,7 +805,9 @@ def get_binned_his_std_qi(data_pixel_qi, lag_steps, max_cts=None): i = 0 for lag in lag_steps: data_pixel_qi_ = np.sum(reshape_array(data_pixel_qi, lag), axis=1) - bins[i], his[i], std[i], kmean[i] = get_his_std_qi(data_pixel_qi_, max_cts * lag) + bins[i], his[i], std[i], kmean[i] = get_his_std_qi( + data_pixel_qi_, max_cts * lag + ) i += 1 return bins, his, std, kmean @@ -896,7 +896,6 @@ def get_bin_edges(num_times, num_rois, mean_roi, max_cts): ##for fit ################### -from scipy import stats from scipy.special import gamma, gammaln ###########################3 @@ -967,9 +966,9 @@ def nbinomlog1(p, hist, x, N, mu): Vary M (shape param) but mu (count rate) fixed (using leastsq) p: fitting parameter, in this case is M, coherent mode number - hist: histogram of photon count for each bin (is a number not probablity) + hist: histogram of photon count for each bin (is a number not probability) x: photon count - N: total photons count in the statistics, ( probablity = hist / N ) + N: total photons count in the statistics, ( 
probability = hist / N ) mu: average photon count for each bin """ @@ -1222,7 +1221,9 @@ def plot_xsvs_fit( Knorm_bin_edges[j, i][:L], spe_cts_all[j, i], ) - xscale = (x_ / x)[1] # bin_edges[j, i][:-1][1]/ Knorm_bin_edges[j, i][:-1][1] + xscale = (x_ / x)[ + 1 + ] # bin_edges[j, i][:-1][1]/ Knorm_bin_edges[j, i][:-1][1] # print( xscale ) else: max_cts_ = max_cts * lag_steps[j] @@ -1271,7 +1272,11 @@ def plot_xsvs_fit( # if j == 0: if j < 2: label = "nbinom_L" - txts = r"$M=%s$" % round(ML_val[i][j], 2) + "," + r"$K=%s$" % round(KL_val[i][j], 2) + txts = ( + r"$M=%s$" % round(ML_val[i][j], 2) + + "," + + r"$K=%s$" % round(KL_val[i][j], 2) + ) # print( ML_val[i] ) x = 0.05 y0 = 0.2 - j * 0.1 @@ -1279,7 +1284,9 @@ def plot_xsvs_fit( fontsize_ = fontsize * 2 else: fontsize_ = 18 - axes.text(x=x, y=y0, s=txts, fontsize=fontsize_, transform=axes.transAxes) + axes.text( + x=x, y=y0, s=txts, fontsize=fontsize_, transform=axes.transAxes + ) else: label = "" (art,) = axes.plot(fitx_, fitL, "-r", label=label) @@ -1350,7 +1357,9 @@ def save_KM(K_mean, KL_val, ML_val, qs=None, level_time=None, uid=None, path=Non + ["M_Fit_%s" % s for s in level_time] + ["Contrast_Fit_%s" % s for s in level_time] ) - data = np.hstack([(K_mean).T, kl.reshape(L, n), ml.reshape(L, n), (1 / ml).reshape(L, n)]) + data = np.hstack( + [(K_mean).T, kl.reshape(L, n), ml.reshape(L, n), (1 / ml).reshape(L, n)] + ) if qs is not None: qs = np.array(qs) l = ["q"] + l @@ -1394,9 +1403,15 @@ def get_his_std_from_pds(spec_pds, his_shapes=None): spec_std = np.zeros([M, N], dtype=np.object) for i in range(M): for j in range(N): - spec_his[i, j] = np.array(spec_pds[spkeys[1 + i * N + j]][~np.isnan(spec_pds[spkeys[1 + i * N + j]])]) + spec_his[i, j] = np.array( + spec_pds[spkeys[1 + i * N + j]][ + ~np.isnan(spec_pds[spkeys[1 + i * N + j]]) + ] + ) spec_std[i, j] = np.array( - spec_pds[spkeys[1 + 2 * N + i * N + j]][~np.isnan(spec_pds[spkeys[1 + 2 * N + i * N + j]])] + spec_pds[spkeys[1 + 2 * N + i * N + j]][ + 
~np.isnan(spec_pds[spkeys[1 + 2 * N + i * N + j]]) + ] ) return spec_his, spec_std @@ -1573,8 +1588,8 @@ def gammaDist(x, params): """Gamma distribution function M,K = params, where K is average photon counts , M is the number of coherent modes, - In case of high intensity, the beam behavors like wave and - the probability density of photon, P(x), satify this gamma function. + In case of high intensity, the beam behaviors like wave and + the probability density of photon, P(x), satisfy this gamma function. """ K, M = params @@ -1657,8 +1672,8 @@ def nbinom_dist(bin_values, K, M): def poisson(x, K): """Poisson distribution function. K is average photon counts - In case of low intensity, the beam behavors like particle and - the probability density of photon, P(x), satify this poisson function. + In case of low intensity, the beam behaviors like particle and + the probability density of photon, P(x), satisfy this poisson function. """ K = float(K) Pk = np.exp(-K) * power(K, x) / gamma(x + 1) @@ -1724,9 +1739,9 @@ def diff_mot_con_factor(times, relaxation_rate, contrast_factor, cf_baseline=0): negative_binom_distribution() function Notes """ - co_eff = (np.exp(-2 * relaxation_rate * times) - 1 + 2 * relaxation_rate * times) / ( - 2 * (relaxation_rate * times) ** 2 - ) + co_eff = ( + np.exp(-2 * relaxation_rate * times) - 1 + 2 * relaxation_rate * times + ) / (2 * (relaxation_rate * times) ** 2) return contrast_factor * co_eff + cf_baseline @@ -1739,7 +1754,7 @@ def plot_sxvs( xlim=[0, 3.5], time_steps=None, ): - """a convinent function to plot sxvs results""" + """a convenient function to plot sxvs results""" num_rings = spe_cts_all.shape[1] num_times = Knorm_bin_edges.shape[0] sx = int(round(np.sqrt(num_rings))) @@ -1785,7 +1800,7 @@ def fit_xsvs1( ylim=None, time_steps=None, ): - """a convinent function to plot sxvs results + """a convenient function to plot sxvs results supporting fit function include: 'bn': Negative Binomaial Distribution 'gm': Gamma Distribution 
@@ -1793,18 +1808,17 @@ def fit_xsvs1( """ from lmfit import Model - from scipy.interpolate import UnivariateSpline if func == "bn": mod = Model(nbinom_dist) elif func == "gm": - mod = Model(gamma_dist, indepdent_vars=["K"]) + mod = Model(gamma_dist, independent_vars=["K"]) elif func == "ps": mod = Model(poisson_dist) else: print("the current supporting function include 'bn', 'gm','ps'") - # g_mod = Model(gamma_dist, indepdent_vars=['K']) + # g_mod = Model(gamma_dist, independent_vars=['K']) # g_mod = Model( gamma_dist ) # n_mod = Model(nbinom_dist) # p_mod = Model(poisson_dist) @@ -1884,9 +1898,13 @@ def fit_xsvs1( fitx_ = np.linspace(0, max(Knorm_bin_edges[j, i][:-1]), 1000) fitx = np.linspace(0, max(bin_edges[j, i][:-1]), 1000) if func == "bn": - fity = nbinom_dist(fitx, K_val[i][j], M_val[i][j]) # M and K are fitted best values + fity = nbinom_dist( + fitx, K_val[i][j], M_val[i][j] + ) # M and K are fitted best values label = "nbinom" - txt = "K=" + "%.3f" % (K_val[i][0]) + "," + "M=" + "%.3f" % (M_val[i][0]) + txt = ( + "K=" + "%.3f" % (K_val[i][0]) + "," + "M=" + "%.3f" % (M_val[i][0]) + ) elif func == "gm": fity = gamma_dist(fitx, K_mean[i] * 2**j, M_val[i][j]) label = "gamma" diff --git a/pyCHX/v2/_commonspeckle/chx_xpcs_xsvs_jupyter_V1.py b/pyCHX/v2/_commonspeckle/chx_xpcs_xsvs_jupyter_V1.py index e9b8876..a498a4b 100644 --- a/pyCHX/v2/_commonspeckle/chx_xpcs_xsvs_jupyter_V1.py +++ b/pyCHX/v2/_commonspeckle/chx_xpcs_xsvs_jupyter_V1.py @@ -7,14 +7,17 @@ # from pyCHX.chx_libs import markers import pandas as pds -from pyCHX.v2._commonspeckle.chx_libs import colors, markers # common #TODO all other instances import with () +from pyCHX.v2._commonspeckle.chx_libs import ( + colors, + markers, +) # common #TODO all other instances import with () def get_t_iqc_uids(uid_list, setup_pargs, slice_num=10, slice_width=1): """Get Iq at different time edge (difined by slice_num and slice_width) for a list of uids Input: uid_list: list of string (uid) - setup_pargs: dict, 
for caculation of Iq, the key of this dict should include + setup_pargs: dict, for calculation of Iq, the key of this dict should include 'center': beam center 'dpix': pixel size 'lambda_': X-ray wavelength @@ -39,7 +42,9 @@ def get_t_iqc_uids(uid_list, setup_pargs, slice_num=10, slice_width=1): good_start = 5 FD = Multifile(filename, good_start, N) Nimg = FD.end - FD.beg - time_edge = create_time_slice(Nimg, slice_num=slice_num, slice_width=slice_width, edges=None) + time_edge = create_time_slice( + Nimg, slice_num=slice_num, slice_width=slice_width, edges=None + ) time_edge = np.array(time_edge) + good_start # print( time_edge ) tstamp[uid] = time_edge[:, 0] * timeperframe @@ -50,7 +55,7 @@ def get_t_iqc_uids(uid_list, setup_pargs, slice_num=10, slice_width=1): def plot_t_iqtMq2(qt, iqst, tstamp, ax=None, perf=""): - """plot q2~Iq at differnt time""" + """plot q2~Iq at different time""" if ax is None: fig, ax = plt.subplots() q = qt @@ -72,7 +77,7 @@ def plot_t_iqtMq2(qt, iqst, tstamp, ax=None, perf=""): def plot_t_iqc_uids(qs, iqsts, tstamps): - """plot q2~Iq at differnt time for a uid list""" + """plot q2~Iq at different time for a uid list""" keys = list(qs.keys()) fig, ax = plt.subplots() for uid in keys: @@ -102,11 +107,11 @@ def plot_entries_from_csvlist( YG June 9, 2017@CHX YG Sep 29, 2017@CHX. - plot enteries for a list csvs + plot entries for a list csvs Input: csv_list: list, a list of uid (string) inDir: string, imported folder for saved analysis results - key: string, plot entry, surport + key: string, plot entry, support 'g2' for one-time, 'iq' for q~iq 'mean_int_sets' for mean intensity of each roi as a function of frame @@ -248,11 +253,11 @@ def plot_entries_from_uids( YG June 9, 2017@CHX YG Sep 29, 2017@CHX. 
- plot enteries for a list uids + plot entries for a list uids Input: uid_list: list, a list of uid (string) inDir: string, imported folder for saved analysis results - key: string, plot entry, surport + key: string, plot entry, support 'g2' for one-time, 'iq' for q~iq 'mean_int_sets' for mean intensity of each roi as a function of frame @@ -304,7 +309,9 @@ def plot_entries_from_uids( filename = "uid=%s_Res.h5" % uid_dict[u] else: filename = filename_list[i] - total_res = extract_xpcs_results_from_h5(filename=filename, import_dir=inDiru, exclude_keys=["g12b"]) + total_res = extract_xpcs_results_from_h5( + filename=filename, import_dir=inDiru, exclude_keys=["g12b"] + ) if key == "g2": d = total_res[key][1:, qth] taus = total_res["taus"][1:] @@ -384,10 +391,10 @@ def plot_entries_from_uids( def get_iq_from_uids(uids, mask, setup_pargs): """Y.G. developed July 17, 2017 @CHX - Get q-Iq of a uids dict, each uid could corrrespond one frame or a time seriers + Get q-Iq of a uids dict, each uid could correspond one frame or a time seriers uids: dict, val: meaningful decription, key: a list of uids mask: bool-type 2D array - setup_pargs: dict, at least should contains, the following paramters for calculation of I(q) + setup_pargs: dict, at least should contains, the following parameters for calculation of I(q) 'Ldet': 4917.50495, 'center': [988, 1120], @@ -442,7 +449,9 @@ def get_iq_from_uids(uids, mask, setup_pargs): setup_pargs["uid"] = uidstr - qp_saxs, iq_saxs, q_saxs = get_circular_average(avg_img, mask, pargs=setup_pargs, save=True) + qp_saxs, iq_saxs, q_saxs = get_circular_average( + avg_img, mask, pargs=setup_pargs, save=True + ) if n == 0: iqs = np.zeros([len(q_saxs), Nuid + 1]) iqs[:, 0] = q_saxs @@ -480,8 +489,8 @@ def wait_func(wait_time=2): # print( 'Starting to do something here...') -def wait_data_acquistion_finish(uid, wait_time=2, max_try_num=3): - """check the completion of a data uid acquistion +def wait_data_acquisition_finish(uid, wait_time=2, 
max_try_num=3): + """check the completion of a data uid acquisition Parameter: uid: wait_time: the waiting step in unit of second @@ -500,14 +509,14 @@ def wait_data_acquistion_finish(uid, wait_time=2, max_try_num=3): try: get_meta_data(uid) FINISH = True - print("The data acquistion finished.") + print("The data acquisition finished.") print("Starting to do something here...") except: wait_func(wait_time=wait_time) w += 1 print("Try number: %s" % w) if w > max_try_num: - print("There could be something going wrong with data acquistion.") + print("There could be something going wrong with data acquisition.") print("Force to terminate after %s tries." % w) FINISH = True Fake_FINISH = False @@ -517,7 +526,7 @@ def wait_data_acquistion_finish(uid, wait_time=2, max_try_num=3): def get_uids_by_range(start_uidth=-1, end_uidth=0): """Y.G. Dec 22, 2016 - A wrap funciton to find uids by giving start and end uid number, i.e. -10, -1 + A wrap function to find uids by giving start and end uid number, i.e. -10, -1 Return: uids: list, uid with 8 character length fuids: list, uid with full length @@ -540,7 +549,7 @@ def get_uids_by_range(start_uidth=-1, end_uidth=0): def get_uids_in_time_period(start_time, stop_time): """Y.G. Dec 22, 2016 - A wrap funciton to find uids by giving start and end time + A wrap function to find uids by giving start and end time Return: uids: list, uid with 8 character length fuids: list, uid with full length @@ -561,7 +570,9 @@ def get_uids_in_time_period(start_time, stop_time): return np.array(uids), np.array(fuids) -def do_compress_on_line(start_time, stop_time, mask_dict=None, mask=None, wait_time=2, max_try_num=3): +def do_compress_on_line( + start_time, stop_time, mask_dict=None, mask=None, wait_time=2, max_try_num=3 +): """Y.G. 
Mar 10, 2017 Do on-line compress by giving start time and stop time Parameters: @@ -580,7 +591,7 @@ def do_compress_on_line(start_time, stop_time, mask_dict=None, mask=None, wait_t print("*" * 50) print("Do compress for %s now..." % uid) if db[uid]["start"]["plan_name"] == "count": - finish = wait_data_acquistion_finish(uid, wait_time, max_try_num) + finish = wait_data_acquisition_finish(uid, wait_time, max_try_num) if finish: try: md = get_meta_data(uid) @@ -638,13 +649,16 @@ def realtime_xpcs_analysis( print("*" * 50) # print('Do compress for %s now...'%uid) print("Starting analysis for %s now..." % uid) - if db[uid]["start"]["plan_name"] == "count" or db[uid]["start"]["plan_name"] == "manual_count": + if ( + db[uid]["start"]["plan_name"] == "count" + or db[uid]["start"]["plan_name"] == "manual_count" + ): # if db[uid]['start']['dtype'] =='xpcs': - finish = wait_data_acquistion_finish(uid, wait_time, max_try_num) + finish = wait_data_acquisition_finish(uid, wait_time, max_try_num) if finish: try: md = get_meta_data(uid) - ##corect some metadata + ##correct some metadata if md_update is not None: md.update(md_update) # if 'username' in list(md.keys()): @@ -667,7 +681,7 @@ def realtime_xpcs_analysis( except: print("There are something wrong with this data: %s..." % uid) else: - print("\nThis is not a XPCS series. We will simiply ignore it.") + print("\nThis is not a XPCS series. 
We will simply ignore it.") print("*" * 50) # print( 'Sleep 10 sec here!!!') @@ -694,7 +708,7 @@ def compress_multi_uids( Parameters: uids: list, a list of uid mask: bool array, mask array - force_compress: default is False, just load the compresssed data; + force_compress: default is False, just load the compressed data; if True, will compress it to overwrite the old compressed data para_compress: apply the parallel compress algorithm bin_frame_number: @@ -751,11 +765,11 @@ def compress_multi_uids( #################################################################################################### -##get_two_time_mulit_uids, sequential cal for uids, but apply parallel for each uid ## +##get_two_time_multi_uids, sequential cal for uids, but apply parallel for each uid ## ################################################################################################# -def get_two_time_mulit_uids( +def get_two_time_multi_uids( uids, roi_mask, norm=None, @@ -768,20 +782,20 @@ def get_two_time_mulit_uids( compress_path=None, ): """Calculate two time correlation by using auto_two_Arrayc func for a set of uids, - if the two-time resutls are already created, by default (force_generate=False), just pass + if the two-time results are already created, by default (force_generate=False), just pass Parameters: uids: list, a list of uid roi_mask: bool array, roi mask array norm: the normalization array path: string, where to save the two time - force_generate: default, False, if the two-time resutls are already created, just pass + force_generate: default, False, if the two-time results are already created, just pass if True, will force to calculate two-time no matter exist or not Return: None, save the two-time in as path + uid + 'uid=%s_g12b'%uid e.g., - get_two_time_mulit_uids( guids, roi_mask, norm= norm,bin_frame_number=1, + get_two_time_multi_uids( guids, roi_mask, norm= norm,bin_frame_number=1, path= data_dir,force_generate=False ) """ @@ -815,13 +829,20 @@ def 
get_two_time_mulit_uids( if not force_generate: if os.path.exists(filename + ".npy"): doit = False - print("The two time correlation function for uid=%s is already calculated. Just pass..." % uid) + print( + "The two time correlation function for uid=%s is already calculated. Just pass..." + % uid + ) if doit: data_pixel = Get_Pixel_Arrayc(FD, pixelist, norm=norm).get_data() g12b = auto_two_Arrayc(data_pixel, roi_mask, index=None) np.save(filename, g12b) del g12b - print("The two time correlation function for uid={} is saved as {}.".format(uid, filename)) + print( + "The two time correlation function for uid={} is saved as {}.".format( + uid, filename + ) + ) def get_series_g2_from_g12( @@ -843,7 +864,7 @@ def get_series_g2_from_g12( will use g12b length to replace this number by default is None, will = [ g12b.shape[0] ] dose_label: the label of each dose, also is the keys of returned g2, lag - log_taus: if true, will only return a g2 with the correponding tau values + log_taus: if true, will only return a g2 with the corresponding tau values as calculated by multi-tau defined taus Return: @@ -865,18 +886,22 @@ def get_series_g2_from_g12( # print( good_end ) if good_end > L: warnings.warn( - "Warning: the dose value is too large, and please check the maxium dose in this data set and give a smaller dose value. We will use the maxium dose of the data." + "Warning: the dose value is too large, and please check the maximum dose in this data set and give a smaller dose value. We will use the maximum dose of the data." 
) good_end = L if not log_taus: - g2[key] = get_one_time_from_two_time(g12b[good_start:good_end, good_start:good_end, :]) + g2[key] = get_one_time_from_two_time( + g12b[good_start:good_end, good_start:good_end, :] + ) else: # print( good_end, num_bufs ) lag_step = get_multi_tau_lag_steps(good_end, num_bufs) lag_step = lag_step[lag_step < good_end - good_start] # print( len(lag_steps ) ) lag_steps[key] = lag_step * time_step - g2[key] = get_one_time_from_two_time(g12b[good_start:good_end, good_start:good_end, :])[lag_step] + g2[key] = get_one_time_from_two_time( + g12b[good_start:good_end, good_start:good_end, :] + )[lag_step] return lag_steps, g2 @@ -885,10 +910,10 @@ def get_fra_num_by_dose(exp_dose, exp_time, att=1, dead_time=2): """ Calculate the frame number to be correlated by giving a X-ray exposure dose - Paramters: + Parameters: exp_dose: a list, the exposed dose, e.g., in unit of exp_time(ms)*N(fram num)*att( attenuation) exp_time: float, the exposure time for a xpcs time sereies - dead_time: dead time for the fast shutter reponse time, CHX = 2ms + dead_time: dead time for the fast shutter response time, CHX = 2ms Return: noframes: the frame number to be correlated, exp_dose/( exp_time + dead_time ) e.g., @@ -901,7 +926,7 @@ def get_fra_num_by_dose(exp_dose, exp_time, att=1, dead_time=2): return np.int_(np.array(exp_dose) / (exp_time + dead_time) / att) -def get_series_one_time_mulit_uids( +def get_series_one_time_multi_uids( uids, qval_dict, trans=None, @@ -915,7 +940,7 @@ def get_series_one_time_mulit_uids( imgs=None, direct_load_data=False, ): - """Calculate a dose depedent series of one time correlations from two time + """Calculate a dose dependent series of one time correlations from two time Parameters: uids: list, a list of uid trans: list, same length as uids, the transmission list @@ -934,7 +959,9 @@ def get_series_one_time_mulit_uids( """ if path is None: - print("Please calculate two time function first by using get_two_time_mulit_uids 
function.") + print( + "Please calculate two time function first by using get_two_time_multi_uids function." + ) else: taus_uids = {} g2_uids = {} @@ -1120,20 +1147,22 @@ def plot_dose_g2( # return taus_dict, g2_dict -def run_xpcs_xsvs_single(uid, run_pargs, md_cor=None, return_res=False, reverse=True, clear_plot=False): +def run_xpcs_xsvs_single( + uid, run_pargs, md_cor=None, return_res=False, reverse=True, clear_plot=False +): """Y.G. Dec 22, 2016 Run XPCS XSVS analysis for a single uid Parameters: uid: unique id run_pargs: dict, control run type and setup parameters, such as q range et.al. - reverse:,True, revserse the image upside down + reverse:,True, reverse the image upside down Return: save analysis result to csv/png/h5 files return_res: if true, return a dict, containing g2,g4,g12,contrast et.al. depending on the run type An example for the run_pargs: run_pargs= dict( - scat_geometry = 'gi_saxs' #suport 'saxs', 'gi_saxs', 'ang_saxs' (for anisotropics saxs or flow-xpcs) + scat_geometry = 'gi_saxs' #support 'saxs', 'gi_saxs', 'ang_saxs' (for anisotropics saxs or flow-xpcs) force_compress = True,#False, para_compress = True, run_fit_form = False, @@ -1301,7 +1330,9 @@ def run_xpcs_xsvs_single(uid, run_pargs, md_cor=None, return_res=False, reverse= if md["detector"] == "eiger1m_single_image": Chip_Mask = np.load("/XF11ID/analysis/2017_1/masks/Eiger1M_Chip_Mask.npy") elif md["detector"] == "eiger4m_single_image" or md["detector"] == "image": - Chip_Mask = np.array(np.load("/XF11ID/analysis/2017_1/masks/Eiger4M_chip_mask.npy"), dtype=bool) + Chip_Mask = np.array( + np.load("/XF11ID/analysis/2017_1/masks/Eiger4M_chip_mask.npy"), dtype=bool + ) BadPix = np.load("/XF11ID/analysis/2018_1/BadPix_4M.npy") Chip_Mask.ravel()[BadPix] = 0 elif md["detector"] == "eiger500K_single_image": @@ -1358,7 +1389,9 @@ def run_xpcs_xsvs_single(uid, run_pargs, md_cor=None, return_res=False, reverse= ) # print_dict( setup_pargs ) - mask = load_mask(mask_path, mask_name, plot_=False, 
image_name=uidstr + "_mask", reverse=reverse) + mask = load_mask( + mask_path, mask_name, plot_=False, image_name=uidstr + "_mask", reverse=reverse + ) mask *= pixel_mask if md["detector"] == "eiger4m_single_image": mask[:, 2069] = 0 # False #Concluded from the previous results @@ -1401,7 +1434,9 @@ def run_xpcs_xsvs_single(uid, run_pargs, md_cor=None, return_res=False, reverse= photon_occ = len(np.where(avg_img)[0]) / (imgsa[0].size) # compress = photon_occ < .4 #if the photon ocupation < 0.5, do compress print("The non-zeros photon occupation is %s." % (photon_occ)) - print("Will " + "Always " + ["NOT", "DO"][compress] + " apply compress process.") + print( + "Will " + "Always " + ["NOT", "DO"][compress] + " apply compress process." + ) # good_start = 5 #make the good_start at least 0 t0 = time.time() filename = "/XF11ID/analysis/Compressed_Data" + "/uid_%s.cmp" % md["uid"] @@ -1426,7 +1461,15 @@ def run_xpcs_xsvs_single(uid, run_pargs, md_cor=None, return_res=False, reverse= uid_ = uidstr + "_fra_%s_%s" % (FD.beg, FD.end) print(uid_) plot1D( - y=imgsum[np.array([i for i in np.arange(good_start, len(imgsum)) if i not in bad_frame_list])], + y=imgsum[ + np.array( + [ + i + for i in np.arange(good_start, len(imgsum)) + if i not in bad_frame_list + ] + ) + ], title=uidstr + "_imgsum", xlabel="Frame", ylabel="Total_Intensity", @@ -1437,7 +1480,7 @@ def run_xpcs_xsvs_single(uid, run_pargs, md_cor=None, return_res=False, reverse= mask = mask * Chip_Mask # %system free && sync && echo 3 > /proc/sys/vm/drop_caches && free - ## Get bad frame list by a polynominal fit + ## Get bad frame list by a polynomial fit bad_frame_list = get_bad_frame_list( imgsum, fit=True, @@ -1467,7 +1510,9 @@ def run_xpcs_xsvs_single(uid, run_pargs, md_cor=None, return_res=False, reverse= cmap=cmap_albula, ) - imgsum_y = imgsum[np.array([i for i in np.arange(len(imgsum)) if i not in bad_frame_list])] + imgsum_y = imgsum[ + np.array([i for i in np.arange(len(imgsum)) if i not in bad_frame_list]) 
+ ] imgsum_x = np.arange(len(imgsum_y)) save_lists( [imgsum_x, imgsum_y], @@ -1560,10 +1605,14 @@ def run_xpcs_xsvs_single(uid, run_pargs, md_cor=None, return_res=False, reverse= if scat_geometry != "ang_saxs": Nimg = FD.end - FD.beg - time_edge = create_time_slice(N=Nimg, slice_num=3, slice_width=1, edges=None) + time_edge = create_time_slice( + N=Nimg, slice_num=3, slice_width=1, edges=None + ) time_edge = np.array(time_edge) + good_start # print( time_edge ) - qpt, iqst, qt = get_t_iqc(FD, time_edge, mask * Chip_Mask, pargs=setup_pargs, nx=1500) + qpt, iqst, qt = get_t_iqc( + FD, time_edge, mask * Chip_Mask, pargs=setup_pargs, nx=1500 + ) plot_t_iqc( qt, iqst, @@ -1618,7 +1667,9 @@ def run_xpcs_xsvs_single(uid, run_pargs, md_cor=None, return_res=False, reverse= path=data_dir, uid=uidstr, ) - qr_1d_pds = cal_1d_qr(avg_img, Qr, Qz, qr_map, qz_map, inc_x0, setup_pargs=setup_pargs) + qr_1d_pds = cal_1d_qr( + avg_img, Qr, Qz, qr_map, qz_map, inc_x0, setup_pargs=setup_pargs + ) plot_qr_1d_with_ROI( qr_1d_pds, qr_center=np.unique(np.array(list(qval_dict.values()))[:, 0]), @@ -1629,9 +1680,13 @@ def run_xpcs_xsvs_single(uid, run_pargs, md_cor=None, return_res=False, reverse= ) Nimg = FD.end - FD.beg - time_edge = create_time_slice(N=Nimg, slice_num=3, slice_width=1, edges=None) + time_edge = create_time_slice( + N=Nimg, slice_num=3, slice_width=1, edges=None + ) time_edge = np.array(time_edge) + good_start - qrt_pds = get_t_qrc(FD, time_edge, Qr, Qz, qr_map, qz_map, path=data_dir, uid=uidstr) + qrt_pds = get_t_qrc( + FD, time_edge, Qr, Qz, qr_map, qz_map, path=data_dir, uid=uidstr + ) plot_qrt_pds(qrt_pds, time_edge, qz_index=0, uid=uidstr, path=data_dir) ############################## @@ -1646,7 +1701,11 @@ def run_xpcs_xsvs_single(uid, run_pargs, md_cor=None, return_res=False, reverse= save=True, path=data_dir, ) - if scat_geometry == "saxs" or scat_geometry == "gi_saxs" or scat_geometry == "gi_waxs": + if ( + scat_geometry == "saxs" + or scat_geometry == "gi_saxs" + 
or scat_geometry == "gi_waxs" + ): if run_waterfall: wat = cal_waterfallc( FD, @@ -1673,7 +1732,9 @@ def run_xpcs_xsvs_single(uid, run_pargs, md_cor=None, return_res=False, reverse= times_roi, mean_int_sets = cal_each_ring_mean_intensityc( FD, roi_mask, timeperframe=None, multi_cor=True ) - plot_each_ring_mean_intensityc(times_roi, mean_int_sets, uid=uidstr, save=True, path=data_dir) + plot_each_ring_mean_intensityc( + times_roi, mean_int_sets, uid=uidstr, save=True, path=data_dir + ) roi_avg = np.average(mean_int_sets, axis=0) uid_ = uidstr + "_fra_%s_%s" % (FD.beg, FD.end) @@ -2108,7 +2169,7 @@ def run_xpcs_xsvs_single(uid, run_pargs, md_cor=None, return_res=False, reverse= ) if run_dose: - get_two_time_mulit_uids( + get_two_time_multi_uids( [uid], roi_mask, norm=norm, @@ -2127,7 +2188,7 @@ def run_xpcs_xsvs_single(uid, run_pargs, md_cor=None, return_res=False, reverse= dose_frame = np.int_([N / 8, N / 4, N / 2, 3 * N / 4, N * 0.99]) # N/32, N/16, N/8, N/4 ,N/2, 3*N/4, N*0.99 exposure_dose = tr * exposuretime * dose_frame - taus_uids, g2_uids = get_series_one_time_mulit_uids( + taus_uids, g2_uids = get_series_one_time_multi_uids( [uid], qval_dict, good_start=good_start, @@ -2155,7 +2216,7 @@ def run_xpcs_xsvs_single(uid, run_pargs, md_cor=None, return_res=False, reverse= append_name="", ) - # Speckel Visiblity + # Speckel Visibility if run_xsvs: max_cts = get_max_countc(FD, roi_mask) qind, pixelist = roi.extract_label_indices(roi_mask) @@ -2164,7 +2225,9 @@ def run_xpcs_xsvs_single(uid, run_pargs, md_cor=None, return_res=False, reverse= # time_steps = np.array( utils.geometric_series(2, len(imgs) ) ) time_steps = [0, 1] # only run the first two levels num_times = len(time_steps) - times_xsvs = exposuretime + (2 ** (np.arange(len(time_steps))) - 1) * timeperframe + times_xsvs = ( + exposuretime + (2 ** (np.arange(len(time_steps))) - 1) * timeperframe + ) print("The max counts are: %s" % max_cts) ### Do historam @@ -2457,12 +2520,18 @@ def run_xpcs_xsvs_single(uid, 
run_pargs, md_cor=None, return_res=False, reverse= Exdt["mean_int_sets"] = mean_int_sets if run_one_time: if scat_geometry != "ang_saxs": - for k, v in zip(["taus", "g2", "g2_fit_paras"], [taus, g2, g2_fit_paras]): + for k, v in zip( + ["taus", "g2", "g2_fit_paras"], [taus, g2, g2_fit_paras] + ): Exdt[k] = v else: - for k, v in zip(["taus_v", "g2_v", "g2_fit_paras_v"], [taus_v, g2_v, g2_fit_paras_v]): + for k, v in zip( + ["taus_v", "g2_v", "g2_fit_paras_v"], [taus_v, g2_v, g2_fit_paras_v] + ): Exdt[k] = v - for k, v in zip(["taus_p", "g2_p", "g2_fit_paras_p"], [taus_p, g2_p, g2_fit_paras_p]): + for k, v in zip( + ["taus_p", "g2_p", "g2_fit_paras_p"], [taus_p, g2_p, g2_fit_paras_p] + ): Exdt[k] = v if run_two_time: for k, v in zip( @@ -2486,7 +2555,9 @@ def run_xpcs_xsvs_single(uid, run_pargs, md_cor=None, return_res=False, reverse= ): Exdt[k] = v - export_xpcs_results_to_h5("uid=%s_Res.h5" % md["uid"], data_dir, export_dict=Exdt) + export_xpcs_results_to_h5( + "uid=%s_Res.h5" % md["uid"], data_dir, export_dict=Exdt + ) # extract_dict = extract_xpcs_results_from_h5( filename = 'uid=%s_Res.h5'%md['uid'], import_dir = data_dir ) # Creat PDF Report pdf_out_dir = os.path.join("/XF11ID/analysis/", CYCLE, username, "Results/") @@ -2521,7 +2592,9 @@ def run_xpcs_xsvs_single(uid, run_pargs, md_cor=None, return_res=False, reverse= pname = pdf_out_dir + pdf_filename atch = [Attachment(open(pname, "rb"))] try: - update_olog_uid(uid=md["uid"], text="Add XPCS Analysis PDF Report", attachments=atch) + update_olog_uid( + uid=md["uid"], text="Add XPCS Analysis PDF Report", attachments=atch + ) except: print( "I can't attach this PDF: %s due to a duplicated filename. Please give a different PDF file." diff --git a/pyCHX/v2/_commonspeckle/movie_maker.py b/pyCHX/v2/_commonspeckle/movie_maker.py index bade9de..87240e4 100644 --- a/pyCHX/v2/_commonspeckle/movie_maker.py +++ b/pyCHX/v2/_commonspeckle/movie_maker.py @@ -23,7 +23,7 @@ def select_regoin( defined by verts e.g. 
xs,xe,ys,ye = vert #x_start, x_end, y_start,y_end (dimy, dimx,) = img.shape - Giving cut postion, start, end, width""" + Giving cut position, start, end, width""" import numpy as np xs, xe, ys, ye = vert @@ -67,7 +67,6 @@ def save_png_series( dpi=100, ): import matplotlib.pyplot as plt - import numpy as np from matplotlib.colors import LogNorm """ @@ -154,7 +153,6 @@ def movie_maker( ): import matplotlib.animation as animation import matplotlib.pyplot as plt - import numpy as np from matplotlib.colors import LogNorm """ @@ -219,7 +217,9 @@ def movie_maker( # print( cmap, vmin, vmax ) if not logs: - im = ax.imshow(i0, origin="lower", cmap=cmap, interpolation="nearest", vmin=vmin, vmax=vmax) + im = ax.imshow( + i0, origin="lower", cmap=cmap, interpolation="nearest", vmin=vmin, vmax=vmax + ) else: im = ax.imshow( i0, @@ -230,7 +230,9 @@ def movie_maker( ) # ttl = ax.text(.75, .2, '', transform = ax.transAxes, va='center', color='white', fontsize=18) - ttl = ax.text(0.75, 0.2, "", transform=ax.transAxes, va="center", color="black", fontsize=18) + ttl = ax.text( + 0.75, 0.2, "", transform=ax.transAxes, va="center", color="black", fontsize=18 + ) # print asp # fig.set_size_inches( [5., 5 * asp] ) diff --git a/pyCHX/v2/_commonspeckle/xpcs_timepixel.py b/pyCHX/v2/_commonspeckle/xpcs_timepixel.py index 6c594a9..7f4137d 100644 --- a/pyCHX/v2/_commonspeckle/xpcs_timepixel.py +++ b/pyCHX/v2/_commonspeckle/xpcs_timepixel.py @@ -10,53 +10,27 @@ import numpy as np import pandas as pds from numpy import ( - apply_over_axes, arange, - arctan, - around, - array, digitize, dot, - exp, histogram, - histogramdd, hstack, hypot, indices, int_, intersect1d, linspace, - load, - log, - log10, - ma, - mean, - mgrid, - ones, - pi, - poly1d, - polyfit, - power, - ravel, - reshape, round, save, - shape, - sin, - sqrt, - std, - sum, - unique, - vstack, where, zeros, zeros_like, ) -from numpy.linalg import lstsq from tqdm import tqdm -from pyCHX.v2._commonspeckle.chx_compress import Multifile, 
go_through_FD, pass_FD # common -from pyCHX.v2._commonspeckle.chx_libs import multi_tau_lags # common #TODO if keep, import from skbeam +from pyCHX.v2._commonspeckle.chx_libs import ( + multi_tau_lags, +) # common #TODO if keep, import from skbeam def get_timepixel_data(data_dir, filename, time_unit=1): @@ -238,7 +212,9 @@ def compress_timepix_data( with_pickle=with_pickle, ) else: - print("Using already created compressed file with filename as :%s." % filename) + print( + "Using already created compressed file with filename as :%s." % filename + ) return pkl.load(open(filename + ".pkl", "rb")) # FD = Multifile(filename, 0, int(1e25) ) @@ -277,7 +253,9 @@ def create_timepix_compress_header(md, filename, nobytes=2, bins=1): fp.close() -def init_compress_timepix_data(pos, t, binstep, filename, mask=None, md=None, nobytes=2, with_pickle=True): +def init_compress_timepix_data( + pos, t, binstep, filename, mask=None, md=None, nobytes=2, with_pickle=True +): """YG.Dev@CHX Nov 19, 2017 with optimal algorithm by using complex index techniques Compress the timepixeldata, in a format of x, y, t @@ -617,7 +595,9 @@ def apply_timepix_mask(x, y, t, roi): return x[w], y[w], t[w] -def get_timepixel_data_from_series(data_dir, filename_prefix, total_filenum=72, colms=int(1e5)): +def get_timepixel_data_from_series( + data_dir, filename_prefix, total_filenum=72, colms=int(1e5) +): x = np.zeros(total_filenum * colms) y = np.zeros(total_filenum * colms) t = zeros(total_filenum * colms) @@ -753,14 +733,14 @@ def read_xyt_frame(n=1): def readframe_series(n=1): - """Using this universe name for all the loading fucntions""" + """Using this universe name for all the loading functions""" return read_xyt_frame(n) class xpcs(object): def __init__(self): """DOCUMENT __init__( ) - the initilization of the XPCS class + the initialization of the XPCS class """ self.version = "version_0" self.create_time = "July_14_2015" @@ -822,7 +802,7 @@ def make_qlist(self): def calqlist(self, qmask=None, 
shape="circle"): """DOCUMENT calqlist( qmask=,shape=, ) - calculate the equvilent pixel with a shape, + calculate the equivalent pixel with a shape, return qind: the index of q pixellist: the list of pixle diff --git a/pyCHX/v2/_futurepyCHX/Badpixels.py b/pyCHX/v2/_futurepyCHX/Badpixels.py index c90714a..ac619e7 100644 --- a/pyCHX/v2/_futurepyCHX/Badpixels.py +++ b/pyCHX/v2/_futurepyCHX/Badpixels.py @@ -1,4 +1,5 @@ """Dev@Octo12,2017""" + import numpy as np damaged_4Mpixel = np.array( @@ -89,7 +90,9 @@ 4155535, ] ), # 57 points, coralpor - "6cc34a": np.array([1058942, 2105743, 2105744, 2107813, 2107815, 2109883, 4155535]), # coralpor + "6cc34a": np.array( + [1058942, 2105743, 2105744, 2107813, 2107815, 2109883, 4155535] + ), # coralpor } diff --git a/pyCHX/v2/_futurepyCHX/Compress_readerNew.py b/pyCHX/v2/_futurepyCHX/Compress_readerNew.py index 8d69158..6f83ee5 100644 --- a/pyCHX/v2/_futurepyCHX/Compress_readerNew.py +++ b/pyCHX/v2/_futurepyCHX/Compress_readerNew.py @@ -54,7 +54,7 @@ def __init__(self, filename, mode="rb", nbytes=2): numimgs: num images """ if mode != "rb" and mode != "wb": - raise ValueError("Error, mode must be 'rb' or 'wb'" "got : {}".format(mode)) + raise ValueError("Error, mode must be 'rb' or 'wb'got : {}".format(mode)) self._filename = filename self._mode = mode @@ -123,7 +123,9 @@ def index(self): def _read_header(self, n): """Read header from current seek position.""" if n > self.Nframes: - raise KeyError("Error, only {} frames, asked for {}".format(self.Nframes, n)) + raise KeyError( + "Error, only {} frames, asked for {}".format(self.Nframes, n) + ) # read in bytes cur = self.frame_indexes[n] header_raw = self._fd[cur : cur + self.HEADER_SIZE] @@ -146,7 +148,9 @@ def _read_raw(self, n): Reads from current cursor in file. 
""" if n > self.Nframes: - raise KeyError("Error, only {} frames, asked for {}".format(self.Nframes, n)) + raise KeyError( + "Error, only {} frames, asked for {}".format(self.Nframes, n) + ) cur = self.frame_indexes[n] + 1024 dlen = self._read_header(n)["dlen"] @@ -212,7 +216,7 @@ def __init__(self, filename, mode="rb"): raise ValueError("Write mode 'wb' not supported yet") if mode != "rb" and mode != "wb": - raise ValueError("Error, mode must be 'rb' or 'wb'" "got : {}".format(mode)) + raise ValueError("Error, mode must be 'rb' or 'wb'got : {}".format(mode)) self._filename = filename self._mode = mode @@ -307,7 +311,9 @@ def _read_raw(self, n): Reads from current cursor in file. """ if n > self.Nframes: - raise KeyError("Error, only {} frames, asked for {}".format(self.Nframes, n)) + raise KeyError( + "Error, only {} frames, asked for {}".format(self.Nframes, n) + ) # dlen is 4 bytes cur = self.frame_indexes[n] dlen = np.frombuffer(self._fd[cur : cur + 4], dtype=" %s & y -> %s| Shutter Mode: %s" @@ -561,9 +559,11 @@ def report_meta(self, top=740, new_page=False): ) elif self.report_type == "gi_saxs": s7 = ( - "Incident Center: [%s, %s] (pixel)" % (md["beam_center_x"], md["beam_center_y"]) + "Incident Center: [%s, %s] (pixel)" + % (md["beam_center_x"], md["beam_center_y"]) + " || " - + "Reflect Center: [%s, %s] (pixel)" % (md["beam_refl_center_x"], md["beam_refl_center_y"]) + + "Reflect Center: [%s, %s] (pixel)" + % (md["beam_refl_center_x"], md["beam_refl_center_y"]) ) elif self.report_type == "ang_saxs" or self.report_type == "gi_waxs": s7 = "Beam Center: [%s, %s] (pixel)" % ( @@ -576,7 +576,11 @@ def report_meta(self, top=740, new_page=False): s7 += " || " + "BadLen: %s" % len(md["bad_frame_list"]) s7 += " || " + "Transmission: %s" % md["transmission"] s.append(s7) ####line 7 'Beam center... 
- m = "Mask file: %s" % md["mask_file"] + " || " + "ROI mask file: %s" % md["roi_mask_file"] + m = ( + "Mask file: %s" % md["mask_file"] + + " || " + + "ROI mask file: %s" % md["roi_mask_file"] + ) # s.append( 'Mask file: %s'%md['mask_file'] ) ####line 8 mask filename # s.append( ) ####line 8 mask filename s.append(m) @@ -587,9 +591,13 @@ def report_meta(self, top=740, new_page=False): self.data_dir_ = self.data_dir s.append("Analysis Results Dir: %s" % self.data_dir_) ####line 9 results folder - s.append("Metadata Dir: %s.csv-&.pkl" % self.metafile) ####line 10 metadata folder + s.append( + "Metadata Dir: %s.csv-&.pkl" % self.metafile + ) ####line 10 metadata folder try: - s.append("Pipeline notebook: %s" % md["NOTEBOOK_FULL_PATH"]) ####line 11 notebook folder + s.append( + "Pipeline notebook: %s" % md["NOTEBOOK_FULL_PATH"] + ) ####line 11 notebook folder except: pass # print( 'here' ) @@ -730,7 +738,11 @@ def report_ROI(self, top=300, new_page=False): ) # add q_Iq - if self.report_type == "saxs" or self.report_type == "gi_saxs" or self.report_type == "ang_saxs": + if ( + self.report_type == "saxs" + or self.report_type == "gi_saxs" + or self.report_type == "ang_saxs" + ): imgf = self.ROI_on_Iq_file img_height = 180 img_left, img_top = 320, top - ds @@ -772,7 +784,9 @@ def report_time_analysis(self, top=720, new_page=False): top1 = top ds = 20 self.sub_title_num += 1 - c.drawString(10, top, "%s. Time Dependent Plot" % self.sub_title_num) # add title + c.drawString( + 10, top, "%s. 
Time Dependent Plot" % self.sub_title_num + ) # add title c.setFont("Helvetica", 14) top = top1 - 160 @@ -943,7 +957,9 @@ def report_oavs(self, top=350, oavs_file=None, new_page=False): # print( imgf,self.data_dir ) print(img_width, img_height) - def report_one_time(self, top=350, g2_fit_file=None, q_rate_file=None, new_page=False): + def report_one_time( + self, top=350, g2_fit_file=None, q_rate_file=None, new_page=False + ): """create the one time correlation function report Two images: One Time Correlation Function with fit @@ -957,7 +973,9 @@ def report_one_time(self, top=350, g2_fit_file=None, q_rate_file=None, new_page= c.setFont("Helvetica", 20) ds = 20 self.sub_title_num += 1 - c.drawString(10, top, "%s. One Time Correlation Function" % self.sub_title_num) # add title + c.drawString( + 10, top, "%s. One Time Correlation Function" % self.sub_title_num + ) # add title c.setFont("Helvetica", 14) # add g2 plot if g2_fit_file is None: @@ -974,7 +992,7 @@ def report_one_time(self, top=350, g2_fit_file=None, q_rate_file=None, new_page= img_height = 550 top = top - 600 str2_left, str2_top = 80, top - 400 - # add one_time caculation + # add one_time calculation img_left, img_top = 1, top if self.g2_fit_new_page or self.g2_new_page: img_height = 550 @@ -1129,8 +1147,8 @@ def report_one_time(self, top=350, g2_fit_file=None, q_rate_file=None, new_page= c.showPage() c.save() - def report_mulit_one_time(self, top=720, new_page=False): - """create the mulit one time correlation function report + def report_multi_one_time(self, top=720, new_page=False): + """create the multi one time correlation function report Two images: One Time Correlation Function with fit q-rate fit @@ -1142,7 +1160,9 @@ def report_mulit_one_time(self, top=720, new_page=False): c.setFont("Helvetica", 20) ds = 20 self.sub_title_num += 1 - c.drawString(10, top, "%s. One Time Correlation Function" % self.sub_title_num) # add title + c.drawString( + 10, top, "%s. 
One Time Correlation Function" % self.sub_title_num + ) # add title c.setFont("Helvetica", 14) # add g2 plot top = top - 320 @@ -1172,7 +1192,9 @@ def report_mulit_one_time(self, top=720, new_page=False): im = Image.open(image) ratio = float(im.size[1]) / im.size[0] height = 180 - c.drawImage(image, 350, top, width=height / ratio, height=height, mask="auto") + c.drawImage( + image, 350, top, width=height / ratio, height=height, mask="auto" + ) c.setFont("Helvetica", 16) c.setFillColor(blue) @@ -1198,7 +1220,9 @@ def report_two_time(self, top=720, new_page=False): ds = 20 self.sub_title_num += 1 - c.drawString(10, top, "%s. Two Time Correlation Function" % self.sub_title_num) # add title + c.drawString( + 10, top, "%s. Two Time Correlation Function" % self.sub_title_num + ) # add title c.setFont("Helvetica", 14) top1 = top @@ -1304,7 +1328,9 @@ def report_four_time(self, top=720, new_page=False): ds = 20 self.sub_title_num += 1 - c.drawString(10, top, "%s. Four Time Correlation Function" % self.sub_title_num) # add title + c.drawString( + 10, top, "%s. Four Time Correlation Function" % self.sub_title_num + ) # add title c.setFont("Helvetica", 14) top1 = top @@ -1411,7 +1437,9 @@ def report_flow_pv_g2(self, top=720, new_page=False): ds = 20 self.sub_title_num += 1 - c.drawString(10, top, "%s. Flow One Time Analysis" % self.sub_title_num) # add title + c.drawString( + 10, top, "%s. Flow One Time Analysis" % self.sub_title_num + ) # add title c.setFont("Helvetica", 14) top1 = top @@ -1515,7 +1543,9 @@ def report_flow_pv_two_time(self, top=720, new_page=False): ds = 20 self.sub_title_num += 1 - c.drawString(10, top, "%s. Flow One &Two Time Comparison" % self.sub_title_num) # add title + c.drawString( + 10, top, "%s. Flow One &Two Time Comparison" % self.sub_title_num + ) # add title c.setFont("Helvetica", 14) top1 = top @@ -1640,7 +1670,9 @@ def report_xsvs(self, top=720, new_page=False): ds = 20 self.sub_title_num += 1 - c.drawString(10, top, "%s. 
Visibility Analysis" % self.sub_title_num) # add title + c.drawString( + 10, top, "%s. Visibility Analysis" % self.sub_title_num + ) # add title c.setFont("Helvetica", 14) top = top - 330 # add xsvs fit @@ -1654,7 +1686,7 @@ def report_xsvs(self, top=720, new_page=False): img_height=300, str1_left=210, str1_top=top + 300, - str1="XSVS_Fit_by_Negtive_Binomal Function", + str1="XSVS_Fit_by_Negative_Binomal Function", str2_left=180, str2_top=top - 10, ) @@ -1688,7 +1720,7 @@ def report_xsvs(self, top=720, new_page=False): c.drawImage(image, 100, top, width=height / ratio, height=height, mask=None) c.setFont("Helvetica", 16) c.setFillColor(blue) - c.drawString(210, top + 300, "XSVS_Fit_by_Negtive_Binomal Function") + c.drawString(210, top + 300, "XSVS_Fit_by_Negative_Binomal Function") c.setFont("Helvetica", 12) c.setFillColor(red) c.drawString(180, top - 10, "filename: %s" % imgf) @@ -1730,7 +1762,9 @@ def done(self): print("*" * 40) -def create_multi_pdf_reports_for_uids(uids, g2, data_dir, report_type="saxs", append_name=""): +def create_multi_pdf_reports_for_uids( + uids, g2, data_dir, report_type="saxs", append_name="" +): """Aug 16, YG@CHX-NSLS-II Create multi pdf reports for each uid in uids uids: a list of uids to be reported @@ -1765,7 +1799,9 @@ def create_multi_pdf_reports_for_uids(uids, g2, data_dir, report_type="saxs", ap c.done() -def create_one_pdf_reports_for_uids(uids, g2, data_dir, filename="all_in_one", report_type="saxs"): +def create_one_pdf_reports_for_uids( + uids, g2, data_dir, filename="all_in_one", report_type="saxs" +): """Aug 16, YG@CHX-NSLS-II Create one pdf reports for each uid in uids uids: a list of uids to be reported @@ -1773,7 +1809,9 @@ def create_one_pdf_reports_for_uids(uids, g2, data_dir, filename="all_in_one", r data_dir: Save pdf report in data dir """ - c = create_pdf_report(data_dir, uid=filename, out_dir=data_dir, load=False, report_type=report_type) + c = create_pdf_report( + data_dir, uid=filename, out_dir=data_dir, 
load=False, report_type=report_type + ) page = 1 for key in list(g2.keys()): @@ -1802,9 +1840,9 @@ def save_res_h5(full_uid, data_dir, save_two_time=False): YG. Nov 10, 2016 save the results to a h5 file will save meta data/avg_img/mask/roi (ring_mask or box_mask)/ - will aslo save multi-tau calculated one-time correlation function g2/taus + will also save multi-tau calculated one-time correlation function g2/taus will also save two-time derived one-time correlation function /g2b/taus2 - if save_two_time if True, will save two-time correaltion function + if save_two_time if True, will save two-time correlation function """ with h5py.File(data_dir + "%s.h5" % full_uid, "w") as hf: # write meta data @@ -1838,9 +1876,9 @@ def load_res_h5(full_uid, data_dir): """YG. Nov 10, 2016 load results from a h5 file will load meta data/avg_img/mask/roi (ring_mask or box_mask)/ - will aslo load multi-tau calculated one-time correlation function g2/taus + will also load multi-tau calculated one-time correlation function g2/taus will also load two-time derived one-time correlation function /g2b/taus2 - if save_two_time if True, will load two-time correaltion function + if save_two_time if True, will load two-time correlation function """ with h5py.File(data_dir + "%s.h5" % full_uid, "r") as hf: @@ -2018,11 +2056,15 @@ def recursively_save_dict_contents_to_group(h5file, path, dic): if not isinstance(key, str): raise ValueError("dict keys must be strings to save to hdf5") # save strings, numpy.int64, and numpy.float64 types - if isinstance(item, (np.int64, np.float64, str, np.float, float, np.float32, int)): + if isinstance( + item, (np.int64, np.float64, str, np.float, float, np.float32, int) + ): # print( 'here' ) h5file[path + key] = item if not h5file[path + key].value == item: - raise ValueError("The data representation in the HDF5 file does not match the original dict.") + raise ValueError( + "The data representation in the HDF5 file does not match the original dict." 
+ ) # save numpy arrays elif isinstance(item, np.ndarray): try: @@ -2031,7 +2073,9 @@ def recursively_save_dict_contents_to_group(h5file, path, dic): item = np.array(item).astype("|S9") h5file[path + key] = item if not np.array_equal(h5file[path + key].value, item): - raise ValueError("The data representation in the HDF5 file does not match the original dict.") + raise ValueError( + "The data representation in the HDF5 file does not match the original dict." + ) # save dictionaries elif isinstance(item, dict): recursively_save_dict_contents_to_group(h5file, path + key + "/", item) @@ -2048,7 +2092,9 @@ def recursively_load_dict_contents_from_group(h5file, path): if isinstance(item, h5py._hl.dataset.Dataset): ans[key] = item.value elif isinstance(item, h5py._hl.group.Group): - ans[key] = recursively_load_dict_contents_from_group(h5file, path + key + "/") + ans[key] = recursively_load_dict_contents_from_group( + h5file, path + key + "/" + ) return ans @@ -2083,7 +2129,9 @@ def export_xpcs_results_to_h5(filename, export_dir, export_dict): elif key in dict_nest: # print(key) try: - recursively_save_dict_contents_to_group(hf, "/%s/" % key, export_dict[key]) + recursively_save_dict_contents_to_group( + hf, "/%s/" % key, export_dict[key] + ) except: print("Can't export the key: %s in this dataset." % key) @@ -2121,10 +2169,15 @@ def export_xpcs_results_to_h5(filename, export_dir, export_dict): mode="a", ) - print("The xpcs analysis results are exported to %s with filename as %s" % (export_dir, filename)) + print( + "The xpcs analysis results are exported to %s with filename as %s" + % (export_dir, filename) + ) -def extract_xpcs_results_from_h5_debug(filename, import_dir, onekey=None, exclude_keys=None): +def extract_xpcs_results_from_h5_debug( + filename, import_dir, onekey=None, exclude_keys=None +): """ YG. 
Dec 22, 2016 extract data from a h5 file @@ -2153,7 +2206,9 @@ def extract_xpcs_results_from_h5_debug(filename, import_dir, onekey=None, exclud for key in list(hf.keys()): if key not in exclude_keys: if key in dicts: - extract_dict[key] = recursively_load_dict_contents_from_group(hf, "/" + key + "/") + extract_dict[key] = recursively_load_dict_contents_from_group( + hf, "/" + key + "/" + ) elif key in [ "g2_fit_paras", "g2b_fit_paras", @@ -2235,10 +2290,15 @@ def export_xpcs_results_to_h5_old(filename, export_dir, export_dict): ) else: data = hf.create_dataset(key, data=export_dict[key]) - print("The xpcs analysis results are exported to %s with filename as %s" % (export_dir, filename)) + print( + "The xpcs analysis results are exported to %s with filename as %s" + % (export_dir, filename) + ) -def extract_xpcs_results_from_h5(filename, import_dir, onekey=None, exclude_keys=None, two_time_qindex=None): +def extract_xpcs_results_from_h5( + filename, import_dir, onekey=None, exclude_keys=None, two_time_qindex=None +): """ YG. Dec 22, 2016 extract data from a h5 file @@ -2250,7 +2310,6 @@ def extract_xpcs_results_from_h5(filename, import_dir, onekey=None, exclude_keys extact_dict: dict, with keys as md, g2, g4 et.al. 
""" - import numpy as np import pandas as pds extract_dict = {} @@ -2290,7 +2349,9 @@ def extract_xpcs_results_from_h5(filename, import_dir, onekey=None, exclude_keys else: extract_dict[key] = hf.get(key)[:] else: - extract_dict[key] = hf.get(key)[:] # np.array( hf.get( key )) + extract_dict[key] = hf.get(key)[ + : + ] # np.array( hf.get( key )) for key in pds_type_keys: if key not in exclude_keys: diff --git a/pyCHX/v2/_futurepyCHX/DEVs.py b/pyCHX/v2/_futurepyCHX/DEVs.py index 19fd4e5..35aa061 100644 --- a/pyCHX/v2/_futurepyCHX/DEVs.py +++ b/pyCHX/v2/_futurepyCHX/DEVs.py @@ -1,7 +1,6 @@ # simple brute force multitau # from pyCHX.chx_generic_functions import average_array_withNan import numpy as np -import skbeam.core.roi as roi from numpy.fft import fft, ifft from tqdm import tqdm @@ -18,7 +17,7 @@ def fit_one_peak_curve(x, y, fit_range): fwhm: float, full width at half max intensity of the peak, 2*sigma fwhm_std:float, error bar of the full width at half max intensity of the peak xf: the x in the fit - out: the fitting class resutled from lmfit + out: the fitting class resulted from lmfit """ from lmfit.models import LinearModel, LorentzianModel @@ -164,7 +163,7 @@ def get_oneQ_g2_fft(time_inten_oneQ, axis=0): Input: time_inten_oneQ: 2d-array, shape=[time, pixel number in the ROI], a time dependent intensity for a list of pixels - ( the equivilent pixels belongs to one Q ) + ( the equivalent pixels belongs to one Q ) Return: G/(P*F) """ @@ -202,7 +201,7 @@ def get_g2_PF(time_inten): def auto_correlation_fft_padding_zeros(a, axis=-1): - """Y.G. Dev@CHX, 2018/10/15 Do autocorelation of ND array by fft + """Y.G. 
Dev@CHX, 2018/10/15 Do autocorelation of ND array by fft Math: Based on auto_cor(arr) = ifft( fft( arr ) * fft(arr[::-1]) ) In numpy form @@ -228,7 +227,8 @@ def auto_correlation_fft_padding_zeros(a, axis=-1): # print(M, N, 2*N-1) cor = np.real( ifft( - fft(a, n=N * 2 - 1, axis=axis) * np.conjugate(fft(a, n=N * 2 - 1, axis=axis)), + fft(a, n=N * 2 - 1, axis=axis) + * np.conjugate(fft(a, n=N * 2 - 1, axis=axis)), n=N * 2 - 1, axis=axis, ) @@ -246,7 +246,7 @@ def auto_correlation_fft_padding_zeros(a, axis=-1): def auto_correlation_fft(a, axis=-1): - """Y.G. Dev@CHX, 2018/10/15 Do autocorelation of ND array by fft + """Y.G. Dev@CHX, 2018/10/15 Do autocorelation of ND array by fft Math: Based on auto_cor(arr) = ifft( fft( arr ) * fft(arr[::-1]) ) In numpy form @@ -307,7 +307,7 @@ def multitau(Ipix, bind, lvl=12, nobuf=8): / noperbin ) G2[j, :] = np.bincount(bind, np.mean(dII[j:, :] * dII[:-j, :], axis=0)) / t - for l in tqdm(np.arange(1, lvl), desc="Calcuate g2..."): + for l in tqdm(np.arange(1, lvl), desc="Calculate g2..."): nn = dII.shape[0] // 2 * 2 # make it even dII = (dII[0:nn:2, :] + dII[1:nn:2, :]) / 2.0 # sum in pairs nn = nn // 2 @@ -321,7 +321,9 @@ def multitau(Ipix, bind, lvl=12, nobuf=8): * np.bincount(bind, np.mean(dII[:-j, :], axis=0)) / noperbin ) - G2[ind, :] = np.bincount(bind, np.mean(dII[j:, :] * dII[:-j, :], axis=0)) / t + G2[ind, :] = ( + np.bincount(bind, np.mean(dII[j:, :] * dII[:-j, :], axis=0)) / t + ) # print(ind) # print(time.time()-t0) return (tt[: ind + 1], G2[: ind + 1, :]) @@ -329,10 +331,10 @@ def multitau(Ipix, bind, lvl=12, nobuf=8): def average_array_withNan(array, axis=0, mask=None): """YG.
Jan 23, 2018 - Average array invovling np.nan along axis + Average array involving np.nan along axis Input: - array: ND array, actually should be oneD or twoD at this stage..TODOLIST for ND + array: ND array, actually should be oneD or twoD at this stage..TODOLIST for ND axis: the average axis mask: bool, same shape as array, if None, will mask all the nan values Output: @@ -353,7 +355,9 @@ def average_array_withNan(array, axis=0, mask=None): return sums / cts -def autocor_for_pix_time(pix_time_data, dly_dict, pixel_norm=None, frame_norm=None, multi_tau_method=True): +def autocor_for_pix_time( + pix_time_data, dly_dict, pixel_norm=None, frame_norm=None, multi_tau_method=True +): """YG Feb 20, 2018@CHX Do correlation for pixel_time type data with tau as defined as dly Input: @@ -373,7 +377,7 @@ def autocor_for_pix_time(pix_time_data, dly_dict, pixel_norm=None, frame_norm=No Gp = np.zeros([Ntau, Np]) Gf = np.zeros([Ntau, Np]) # mask_pix = np.isnan(pix_time_data) - # for tau_ind, tau in tqdm( enumerate(dly), desc= 'Calcuate g2...' ): + # for tau_ind, tau in tqdm( enumerate(dly), desc= 'Calculate g2...' ): tau_ind = 0 # if multi_tau_method: pix_time_datac = pix_time_data.copy() @@ -383,14 +387,18 @@ def autocor_for_pix_time(pix_time_data, dly_dict, pixel_norm=None, frame_norm=No if frame_norm is not None: pix_time_datac /= frame_norm - for tau_lev, tau_key in tqdm(enumerate(list(dly_dict.keys())), desc="Calcuate g2..."): + for tau_lev, tau_key in tqdm( + enumerate(list(dly_dict.keys())), desc="Calculate g2..."
+ ): # print(tau_key) taus = dly_dict[tau_key] if multi_tau_method: if tau_lev > 0: nobuf = len(dly_dict[1]) nn = pix_time_datac.shape[0] // 2 * 2 # make it even - pix_time_datac = (pix_time_datac[0:nn:2, :] + pix_time_datac[1:nn:2, :]) / 2.0 # sum in pairs + pix_time_datac = ( + pix_time_datac[0:nn:2, :] + pix_time_datac[1:nn:2, :] + ) / 2.0 # sum in pairs nn = nn // 2 if nn < nobuf: break @@ -450,7 +458,6 @@ def autocor_xytframe(self, n): ###################For Fit import matplotlib.pyplot as plt -import numpy as np from scipy.optimize import leastsq # duplicate my curfit function from yorick, except use sigma and not w @@ -465,7 +472,9 @@ def curfit(x, y, a, sigy=None, function_name=None, adj=None): function_name = funct # print( a, adj, a[adj] ) # print(x,y,a) - afit, cv, idt, m, ie = leastsq(_residuals, a[adj], args=(x, y, sigy, a, adj, function_name), full_output=True) + afit, cv, idt, m, ie = leastsq( + _residuals, a[adj], args=(x, y, sigy, a, adj, function_name), full_output=True + ) a[adj] = afit realcv = np.identity(afit.size) realcv[np.ix_(adj, adj)] = cv @@ -497,12 +506,15 @@ def fitpr(chisq, a, sigmaa, title=None, lbl=None): lbl = [] for i in xrange(a.size): lbl.append("A%(#)02d" % {"#": i}) - # print resuls of a fit. + # print results of a fit. 
if title != None: print(title) print(" chisq=%(c).4f" % {"c": chisq}) for i in range(a.size): - print(" %(lbl)8s =%(m)10.4f +/- %(s).4f" % {"lbl": lbl[i], "m": a[i], "s": sigmaa[i]}) + print( + " %(lbl)8s =%(m)10.4f +/- %(s).4f" + % {"lbl": lbl[i], "m": a[i], "s": sigmaa[i]} + ) # easy plot for fit @@ -525,7 +537,9 @@ def Gaussian(x, p): """ xo, amplitude, sigma, offset = p - g = offset + amplitude * 1.0 / (sigma * np.sqrt(2 * np.pi)) * np.exp(-1 / 2.0 * (x - xo) ** 2 / sigma**2) + g = offset + amplitude * 1.0 / (sigma * np.sqrt(2 * np.pi)) * np.exp( + -1 / 2.0 * (x - xo) ** 2 / sigma**2 + ) return g @@ -564,7 +578,8 @@ def gen_elps_sectors(a, b, r_min, r_n, th_n, c_x, c_y, th_min=0, th_max=360): th_list = np.linspace(th_min, th_max, th_n + 1) r_list = np.linspace(r_min, 1, r_n + 1) regions_list = [ - [[np.array([], dtype=np.int_), np.array([], dtype=np.int_)] for _ in range(r_n)] for _ in range(th_n) + [[np.array([], dtype=np.int_), np.array([], dtype=np.int_)] for _ in range(r_n)] + for _ in range(th_n) ] w = int(np.ceil(a * 2)) h = int(np.ceil(b * 2)) @@ -578,12 +593,18 @@ def gen_elps_sectors(a, b, r_min, r_n, th_n, c_x, c_y, th_min=0, th_max=360): cur_r = np.sqrt(cur_x**2 + cur_y**2) cur_elps_r = elps_r(a, b, cur_theta) cur_r_list = r_list * cur_elps_r - cur_theta = np.rad2deg(cur_theta) # Convert to degrees to compare with th_list + cur_theta = np.rad2deg( + cur_theta + ) # Convert to degrees to compare with th_list r_ind = place_in_interval(cur_r, cur_r_list) th_ind = place_in_interval(cur_theta, th_list) if (r_ind != -1) and (th_ind != -1): - regions_list[th_ind][r_ind][0] = np.append(regions_list[th_ind][r_ind][0], ii + x_offset) - regions_list[th_ind][r_ind][1] = np.append(regions_list[th_ind][r_ind][1], jj + y_offset) + regions_list[th_ind][r_ind][0] = np.append( + regions_list[th_ind][r_ind][0], ii + x_offset + ) + regions_list[th_ind][r_ind][1] = np.append( + regions_list[th_ind][r_ind][1], jj + y_offset + ) sectors = [] for th_reg_list in 
regions_list: for sector in th_reg_list: diff --git a/pyCHX/v2/_futurepyCHX/DataGonio.py b/pyCHX/v2/_futurepyCHX/DataGonio.py index 18bf602..64df1a8 100644 --- a/pyCHX/v2/_futurepyCHX/DataGonio.py +++ b/pyCHX/v2/_futurepyCHX/DataGonio.py @@ -1,19 +1,15 @@ # import sys -import os -import re # Regular expressions -import sys -import matplotlib as mpl import numpy as np # from scipy.optimize import leastsq # import scipy.special import PIL # Python Image Library (for opening PNG, etc.) -import pylab as plt -import skbeam.core.correlation as corr -import skbeam.core.roi as roi import skbeam.core.utils as utils -from skbeam.core.accumulators.binned_statistic import BinnedStatistic1D, BinnedStatistic2D +from skbeam.core.accumulators.binned_statistic import ( + BinnedStatistic1D, + BinnedStatistic2D, +) from pyCHX.chx_generic_functions import average_array_withNan @@ -151,7 +147,9 @@ def get_QPhiMap(img_shape, center): return q_map, phi_map -def get_img_qphimap(img, q_map, phi_map, mask, bins, center, qang_range=None, statistic="mean"): +def get_img_qphimap( + img, q_map, phi_map, mask, bins, center, qang_range=None, statistic="mean" +): """Y.G., Dev Nov 10, 2018 Get phi_map by giving image e.g., q_map, phi_map = get_QPhiMap( mask.shape, center[::-1]) @@ -536,7 +534,9 @@ def _generate_qxyz_maps(self): alpha_f = np.arctan2(Y * c * np.cos(theta_f), 1) # radians self.qx_map_data = self.get_k() * np.sin(theta_f) * np.cos(alpha_f) - self.qy_map_data = self.get_k() * (np.cos(theta_f) * np.cos(alpha_f) - 1) # TODO: Check sign + self.qy_map_data = self.get_k() * ( + np.cos(theta_f) * np.cos(alpha_f) - 1 + ) # TODO: Check sign self.qz_map_data = -1.0 * self.get_k() * np.sin(alpha_f) self.qr_map_data = np.sign(self.qx_map_data) * np.sqrt( @@ -551,7 +551,7 @@ def _generate_qxyz_maps(self): ################################################################################ class CalibrationGonio(Calibration): """ - The geometric claculations used here are described: + The geometric 
calculations used here are described: http://gisaxs.com/index.php/Geometry:WAXS_3D """ @@ -596,7 +596,7 @@ def set_angles( self.sam_chi = sam_chi self.sam_theta = sam_theta - def rotation_matix(self, sam_phi, sam_theta, sam_chi, degrees=True): + def rotation_matrix(self, sam_phi, sam_theta, sam_chi, degrees=True): """ sam_phi, rotate along lab-frame x, CHX phi sam_chi, rotate along lab-frame z, CHX chi @@ -636,11 +636,13 @@ def rotation_matix(self, sam_phi, sam_theta, sam_chi, degrees=True): Rxy = np.dot(Rx, Ry) return np.dot(Rxy, Rz) - def _generate_qxyz_map_SF_from_Lab(self, qx, qy, qz, sam_phi, sam_theta, sam_chi, degrees=True): + def _generate_qxyz_map_SF_from_Lab( + self, qx, qy, qz, sam_phi, sam_theta, sam_chi, degrees=True + ): """ Convert qmap from Lab frame to sample frame """ - self.Rot = self.rotation_matix(sam_phi, sam_theta, sam_chi, degrees=degrees) + self.Rot = self.rotation_matrix(sam_phi, sam_theta, sam_chi, degrees=degrees) qsx, qsy, qsz = np.dot(self.Rot, [np.ravel(qx), np.ravel(qy), np.ravel(qz)]) return qsx.reshape(qx.shape), qsy.reshape(qy.shape), qsz.reshape(qz.shape) @@ -662,10 +664,14 @@ def _generate_qxyz_maps_samFrame(self, degrees=True): self.sam_chi, degrees=degrees, ) - self.qr_map_lab_data = np.sqrt(np.square(self.qx_map_lab_data) + np.square(self.qy_map_lab_data)) + self.qr_map_lab_data = np.sqrt( + np.square(self.qx_map_lab_data) + np.square(self.qy_map_lab_data) + ) self.q_map_lab_data = np.sqrt( - np.square(self.qx_map_lab_data) + np.square(self.qy_map_lab_data) + np.square(self.qz_map_lab_data) + np.square(self.qx_map_lab_data) + + np.square(self.qy_map_lab_data) + + np.square(self.qz_map_lab_data) ) def get_ratioDw(self): @@ -687,9 +693,9 @@ def angle_map(self): return self.angle_map_data - def _generate_qxyz_maps_no_offest(self): + def _generate_qxyz_maps_no_offset(self): """ - The geometric claculations used here are described: + The geometric calculations used here are described: http://gisaxs.com/index.php/Geometry:WAXS_3D 
""" @@ -708,10 +714,13 @@ def _generate_qxyz_maps_no_offest(self): k_over_Dprime = self.get_k() / Dprime qx_c = k_over_Dprime * ( - X_c * np.cos(phi_g) - np.sin(phi_g) * (d * np.cos(theta_g) - Y_c * np.sin(theta_g)) + X_c * np.cos(phi_g) + - np.sin(phi_g) * (d * np.cos(theta_g) - Y_c * np.sin(theta_g)) ) qy_c = k_over_Dprime * ( - X_c * np.sin(phi_g) + np.cos(phi_g) * (d * np.cos(theta_g) - Y_c * np.sin(theta_g)) - Dprime + X_c * np.sin(phi_g) + + np.cos(phi_g) * (d * np.cos(theta_g) - Y_c * np.sin(theta_g)) + - Dprime ) qz_c = -1 * k_over_Dprime * (d * np.sin(theta_g) + Y_c * np.cos(theta_g)) @@ -733,7 +742,9 @@ def _generate_qxyz_maps_no_offest(self): alpha_f = np.arctan2(Y * c * np.cos(theta_f), 1) # radians self.qx_map_data = self.get_k() * np.sin(theta_f) * np.cos(alpha_f) - self.qy_map_data = self.get_k() * (np.cos(theta_f) * np.cos(alpha_f) - 1) # TODO: Check sign + self.qy_map_data = self.get_k() * ( + np.cos(theta_f) * np.cos(alpha_f) - 1 + ) # TODO: Check sign self.qz_map_data = -1.0 * self.get_k() * np.sin(alpha_f) self.qr_map_data = np.sign(self.qx_map_data) * np.sqrt( @@ -747,7 +758,7 @@ def _generate_qxyz_maps_no_offest(self): def _generate_qxyz_maps(self): """ - The geometric claculations used here are described: + The geometric calculations used here are described: http://gisaxs.com/index.php/Geometry:WAXS_3D YG add offset corrections at Sep 21, 2017 @@ -791,8 +802,14 @@ def _generate_qxyz_maps(self): k_over_Dprime = self.get_k() / Dprime qx_c = k_over_Dprime * (X_c * np.cos(phi_g) - np.sin(phi_g) * yprime + offset_x) - qy_c = k_over_Dprime * (X_c * np.sin(phi_g) + np.cos(phi_g) * yprime + offset_y - Dprime) - qz_c = -1 * k_over_Dprime * (dprime * np.sin(theta_g) + Y_c * np.cos(theta_g) + offset_z) + qy_c = k_over_Dprime * ( + X_c * np.sin(phi_g) + np.cos(phi_g) * yprime + offset_y - Dprime + ) + qz_c = ( + -1 + * k_over_Dprime + * (dprime * np.sin(theta_g) + Y_c * np.cos(theta_g) + offset_z) + ) qr_c = np.sqrt(np.square(qx_c) + np.square(qy_c)) q_c 
= np.sqrt(np.square(qx_c) + np.square(qy_c) + np.square(qz_c)) @@ -819,7 +836,9 @@ def _generate_qxyz_maps(self): alpha_f = np.arctan2(Y * c * np.cos(theta_f), 1) # radians self.qx_map_data1 = self.get_k() * np.sin(theta_f) * np.cos(alpha_f) - self.qy_map_data1 = self.get_k() * (np.cos(theta_f) * np.cos(alpha_f) - 1) # TODO: Check sign + self.qy_map_data1 = self.get_k() * ( + np.cos(theta_f) * np.cos(alpha_f) - 1 + ) # TODO: Check sign self.qz_map_data1 = -1.0 * self.get_k() * np.sin(alpha_f) self.qr_map_data1 = np.sign(self.qx_map_data1) * np.sqrt( diff --git a/pyCHX/v2/_futurepyCHX/SAXS.py b/pyCHX/v2/_futurepyCHX/SAXS.py index e08b8f5..189ef25 100644 --- a/pyCHX/v2/_futurepyCHX/SAXS.py +++ b/pyCHX/v2/_futurepyCHX/SAXS.py @@ -5,9 +5,9 @@ """ # import numpy as np -from lmfit import Model, Parameter, Parameters, fit_report, minimize, report_fit -from scipy.optimize import curve_fit, least_squares, leastsq -from scipy.special import gamma, gammaln +from lmfit import Model, Parameters, minimize +from scipy.optimize import leastsq +from scipy.special import gamma from pyCHX.chx_generic_functions import find_index, plot1D, show_img @@ -92,7 +92,7 @@ def poly_sphere_form_factor_intensity( radius/R: in A sigma:sqrt root of variance in percent delta_rho: Scattering Length Density(SLD) difference between solvent and the scatter, A-2 - fit_func: G: Guassian;S: Flory–Schulz distribution + fit_func: G: Gaussian;S: Flory–Schulz distribution Output: The form factor intensity of the polydispersed scatter """ @@ -104,7 +104,9 @@ def poly_sphere_form_factor_intensity( if sigma == 0: v = mono_sphere_form_factor_intensity(q, R, delta_rho) else: - r, rs, wt = distribution_func(radius=R, sigma=sigma, num_points=num_points, spread=spread, func=fit_func) + r, rs, wt = distribution_func( + radius=R, sigma=sigma, num_points=num_points, spread=spread, func=fit_func + ) for i, Ri in enumerate(r): # print(Ri, wt[i],delta_rho, rs) v += mono_sphere_form_factor_intensity(q, Ri, delta_rho) * 
wt[i] * rs @@ -124,7 +126,9 @@ def poly_sphere_form_factor_intensity_q2( The form factor intensity of the polydispersed scatter """ - return poly_sphere_form_factor_intensity(x, radius, sigma, delta_rho, fit_func) * x**2 # * scale + baseline + return ( + poly_sphere_form_factor_intensity(x, radius, sigma, delta_rho, fit_func) * x**2 + ) # * scale + baseline def find_index_old(x, x0, tolerance=None): @@ -149,7 +153,9 @@ def find_index_old(x, x0, tolerance=None): return position -def form_factor_residuals(p, iq, q, num_points=20, spread=5, fit_func="G", form_model="poly_sphere"): +def form_factor_residuals( + p, iq, q, num_points=20, spread=5, fit_func="G", form_model="poly_sphere" +): """Residuals for fit iq by spheical form factor using leastsq. p: parameters for radius, sigma, delta_rho, background @@ -209,7 +215,9 @@ def form_factor_residuals_bg( return np.sqrt(np.abs(err)) -def form_factor_residuals_lmfit(p, iq, q, num_points=20, spread=5, fit_func="G", form_model="poly_sphere"): +def form_factor_residuals_lmfit( + p, iq, q, num_points=20, spread=5, fit_func="G", form_model="poly_sphere" +): """Residuals for fit iq by spheical form factor using leastsq. p: parameters for radius, sigma, delta_rho, background """ @@ -234,7 +242,9 @@ def form_factor_residuals_lmfit(p, iq, q, num_points=20, spread=5, fit_func="G", return err -def form_factor_residuals_bg_lmfit(p, iq, q, num_points=20, spread=5, fit_func="G", form_model="poly_sphere"): +def form_factor_residuals_bg_lmfit( + p, iq, q, num_points=20, spread=5, fit_func="G", form_model="poly_sphere" +): """Residuals for fit iq by spheical form factor using leastsq. 
p: parameters for radius, sigma, delta_rho, background """ @@ -301,7 +311,7 @@ def get_form_factor_fit_lmfit( Returns ------- - fit resutls: + fit results: radius sigma an example: @@ -429,7 +439,7 @@ def get_form_factor_fit2( Returns ------- - fit resutls: + fit results: radius sigma an example: @@ -522,7 +532,9 @@ def get_form_factor_fit2( ) if (len(iq_) > len(p)) and pcov is not None: - s_sq = (fit_funcs(pfit, iq_, q_, num_points, spread, fit_func, function)).sum() / (len(iq_) - len(p)) + s_sq = ( + fit_funcs(pfit, iq_, q_, num_points, spread, fit_func, function) + ).sum() / (len(iq_) - len(p)) pcov = pcov * s_sq else: pcov = np.inf @@ -572,7 +584,7 @@ def get_form_factor_fit( Returns ------- - fit resutls: + fit results: radius sigma an example: @@ -584,7 +596,10 @@ def get_form_factor_fit( elif function == "mono_sphere": mod = Model(mono_sphere_form_factor_intensity) else: - print("The %s is not supported.The supported functions include poly_sphere and mono_sphere" % function) + print( + "The %s is not supported.The supported functions include poly_sphere and mono_sphere" + % function + ) if fit_range is not None: x1, x2 = fit_range @@ -632,7 +647,9 @@ def get_form_factor_fit( return result, q_ -def plot_form_factor_with_fit(q, iq, q_, result, fit_power=0, res_pargs=None, return_fig=False, *argv, **kwargs): +def plot_form_factor_with_fit( + q, iq, q_, result, fit_power=0, res_pargs=None, return_fig=False, *argv, **kwargs +): if res_pargs is not None: uid = res_pargs["uid"] path = res_pargs["path"] @@ -724,7 +741,7 @@ def fit_form_factor( Returns ------- - fit resutls: + fit results: radius sigma an example: @@ -740,7 +757,9 @@ def fit_form_factor( function=function, fit_func=fit_func, ) - plot_form_factor_with_fit(q, iq, q_, result, fit_power=0, res_pargs=res_pargs, return_fig=return_fig) + plot_form_factor_with_fit( + q, iq, q_, result, fit_power=0, res_pargs=res_pargs, return_fig=return_fig + ) return result @@ -779,7 +798,7 @@ def fit_form_factor2( Returns 
------- - fit resutls: + fit results: radius sigma an example: @@ -804,7 +823,10 @@ def fit_form_factor2( elif function == "mono_sphere": mod = Model(mono_sphere_form_factor_intensity) else: - print("The %s is not supported.The supported functions include poly_sphere and mono_sphere" % function) + print( + "The %s is not supported.The supported functions include poly_sphere and mono_sphere" + % function + ) if fit_range is not None: x1, x2 = fit_range @@ -1017,7 +1039,9 @@ def show_saxs_qmap( ##Fit sphere by scipy.leastsq fit -def fit_sphere_form_factor_func(parameters, ydata, xdata, yerror=None, nonvariables=None): +def fit_sphere_form_factor_func( + parameters, ydata, xdata, yerror=None, nonvariables=None +): """##Develop by YG at July 28, 2017 @CHX This function is for fitting form factor of polyderse spherical particles by using scipy.leastsq fit diff --git a/pyCHX/v2/_futurepyCHX/Stitching.py b/pyCHX/v2/_futurepyCHX/Stitching.py index da5291f..335346d 100644 --- a/pyCHX/v2/_futurepyCHX/Stitching.py +++ b/pyCHX/v2/_futurepyCHX/Stitching.py @@ -1,6 +1,4 @@ -import os import re -import sys import matplotlib.pyplot as plt import numpy as np @@ -19,14 +17,16 @@ def get_base_all_filenames(inDir, base_filename_cut_length=-7): base_filename_cut_length: to which length the base name is unique Output: dict: keys, base filename - vales, all realted filename + vales, all related filename """ from os import listdir from os.path import isfile, join tifs = np.array([f for f in listdir(inDir) if isfile(join(inDir, f))]) tifsc = list(tifs.copy()) - utifs = np.sort(np.unique(np.array([f[:base_filename_cut_length] for f in tifs])))[::-1] + utifs = np.sort(np.unique(np.array([f[:base_filename_cut_length] for f in tifs])))[ + ::-1 + ] files = {} for uf in utifs: files[uf] = [] @@ -82,10 +82,10 @@ def Correct_Overlap_Images_Intensities( Return: data: array, stitched image with corrected intensity dataM: dict, each value is the image with correted intensity - scale: scale for each 
image, the first scale=1 by defination + scale: scale for each image, the first scale=1 by definition scale_smooth: smoothed scale - Exampe: + Example: data, dataM, scale,scale_smooth = Correct_Overlap_Images_Intensities( infiles, window_length=101, polyorder=5, overlap_width=58, badpixel_width =10 ) @@ -139,7 +139,9 @@ def Correct_Overlap_Images_Intensities( mode="mirror", cval=0.0, ) - data[:, a1:a2] = d[:, b1:b2] * np.repeat(scale_smooth[i], b2 - b1, axis=0).reshape([M, b2 - b1]) + data[:, a1:a2] = d[:, b1:b2] * np.repeat( + scale_smooth[i], b2 - b1, axis=0 + ).reshape([M, b2 - b1]) dataM[i] = np.zeros_like(dataM[i - 1]) dataM[i][:, 0 : w - ow] = dataM[i - 1][:, N - w : N - ow] dataM[i][:, w - ow :] = data[:, a1:a2] @@ -171,10 +173,12 @@ def check_overlap_scaling_factor(scale, scale_smooth, i=1, filename=None, save=F fig.savefig(filename) -def stitch_WAXS_in_Qspace(dataM, phis, calibration, dx=0, dy=22, dz=0, dq=0.015, mask=None): +def stitch_WAXS_in_Qspace( + dataM, phis, calibration, dx=0, dy=22, dz=0, dq=0.015, mask=None +): """YG Octo 11, 2017 stitch waxs scattering images in qspace - dataM: the data (with corrected intensity), dict format (todolist, make array also avialable) - phis: for SMI, the rotation angle around z-aixs + dataM: the data (with corrected intensity), dict format (todolist, make array also available) + phis: for SMI, the rotation angle around z-axis For SMI dx= 0 #in pixel unit dy = 22 #in pixel unit @@ -224,16 +228,22 @@ def stitch_WAXS_in_Qspace(dataM, phis, calibration, dx=0, dy=22, dz=0, dq=0.015, dM = np.rot90(dataM[i].T) D = dM.ravel() phi = phis[i] - calibration.set_angles(det_phi_g=phi, det_theta_g=0.0, offset_x=dx, offset_y=dy, offset_z=dz) + calibration.set_angles( + det_phi_g=phi, det_theta_g=0.0, offset_x=dx, offset_y=dy, offset_z=dz + ) calibration.clear_maps() QZ = calibration.qz_map().ravel() # [pixel_list] QX = calibration.qx_map().ravel() # [pixel_list] bins = [num_qz, num_qx] rangeq = [[qz_min, qz_max], [qx_min, qx_max]] 
# Nov 7,2017 using new func to qmap - remesh_data, zbins, xbins = convert_Qmap(dM, QZ, QX, bins=bins, range=rangeq, mask=mask) + remesh_data, zbins, xbins = convert_Qmap( + dM, QZ, QX, bins=bins, range=rangeq, mask=mask + ) # Normalize by the binning - num_per_bin, zbins, xbins = convert_Qmap(np.ones_like(dM), QZ, QX, bins=bins, range=rangeq, mask=mask) + num_per_bin, zbins, xbins = convert_Qmap( + np.ones_like(dM), QZ, QX, bins=bins, range=rangeq, mask=mask + ) # remesh_data, zbins, xbins = np.histogram2d(QZ, QX, bins=bins, range=rangeq, normed=False, weights=D) # Normalize by the binning @@ -251,7 +261,6 @@ def plot_qmap_in_folder(inDir): """ import pickle as cpl - from pyCHX.chx_generic_functions import show_img from pyCHX.chx_libs import cmap_vge_hdr, plt fp = get_base_all_filenames(inDir, base_filename_cut_length=-10) @@ -287,7 +296,7 @@ def plot_qmap_in_folder(inDir): def get_qmap_range(calibration, phi_min, phi_max): """YG Sep 27@SMI Get q_range, [ qx_start, qx_end, qz_start, qz_end ] for SMI WAXS qmap - (only rotate around z-axis, so det_theta_g=0.,actually being the y-axis for beamline conventional defination) + (only rotate around z-axis, so det_theta_g=0.,actually being the y-axis for beamline conventional definition) based on calibration on Sep 22, offset_x= 0, offset_y= 22 Input: calibration: class, See SciAnalysis.XSAnalysis.DataGonio.CalibrationGonio @@ -308,7 +317,9 @@ def get_qmap_range(calibration, phi_min, phi_max): return np.array([qx_start, qx_end, qz_start, qz_end]) -def get_phi(filename, phi_offset=0, phi_start=4.5, phi_spacing=4.0, polarity=-1, ext="_WAXS.tif"): +def get_phi( + filename, phi_offset=0, phi_start=4.5, phi_spacing=4.0, polarity=-1, ext="_WAXS.tif" +): pattern_re = "^.+\/?([a-zA-Z0-9_]+_)(\d\d\d\d\d\d)(\%s)$" % ext # print( pattern_re ) # pattern_re='^.+\/?([a-zA-Z0-9_]+_)(\d\d\d)(\.tif)$' @@ -345,7 +356,7 @@ def get_qmap_qxyz_range( ): """YG Nov 8, 2017@CHX Get q_range, [ qx_start, qx_end, qz_start, qz_end ] for SMI WAXS qmap - 
(only rotate around z-axis, so det_theta_g=0.,actually being the y-axis for beamline conventional defination) + (only rotate around z-axis, so det_theta_g=0.,actually being the y-axis for beamline conventional definition) based on calibration on Sep 22, offset_x= 0, offset_y= 22 Input: calibration: class, See SciAnalysis.XSAnalysis.DataGonio.CalibrationGonio @@ -415,8 +426,8 @@ def stitch_WAXS_in_Qspace_CHX( dq=0.0008, ): """YG Octo 11, 2017 stitch waxs scattering images in qspace - dataM: the data (with corrected intensity), dict format (todolist, make array also avialable) - phis: for SMI, the rotation angle around z-aixs + dataM: the data (with corrected intensity), dict format (todolist, make array also available) + phis: for SMI, the rotation angle around z-axis For SMI dx= 0 #in pixel unit dy = 22 #in pixel unit diff --git a/pyCHX/v2/_futurepyCHX/Two_Time_Correlation_Function.py b/pyCHX/v2/_futurepyCHX/Two_Time_Correlation_Function.py index b3d7899..3f2df9f 100644 --- a/pyCHX/v2/_futurepyCHX/Two_Time_Correlation_Function.py +++ b/pyCHX/v2/_futurepyCHX/Two_Time_Correlation_Function.py @@ -5,27 +5,23 @@ ###################################################################################### -import itertools -import sys import time -from datetime import datetime import matplotlib.pyplot as plt import numpy as np import skbeam.core.roi as roi from matplotlib import gridspec from matplotlib.colors import LogNorm -from modest_image import ModestImage, imshow +from modest_image import imshow from tqdm import tqdm # from pyCHX.chx_libs import colors_ as mcolors, markers_ as markers from pyCHX.chx_libs import RUN_GUI, Figure from pyCHX.chx_libs import colors from pyCHX.chx_libs import colors as colors_array -from pyCHX.chx_libs import lstyles from pyCHX.chx_libs import markers from pyCHX.chx_libs import markers as markers_array -from pyCHX.chx_libs import markers_copy, mcolors, multi_tau_lags +from pyCHX.chx_libs import multi_tau_lags def delays(num_lev=3, num_buf=4, 
time=1): @@ -165,19 +161,23 @@ def run_time(t0): print("Total time: %.2f min" % (elapsed_time / 60.0)) -def get_each_frame_ROI_intensity(data_pixel, bad_pixel_threshold=1e10, plot_=False, *argv, **kwargs): +def get_each_frame_ROI_intensity( + data_pixel, bad_pixel_threshold=1e10, plot_=False, *argv, **kwargs +): """ Dec 16, 2015, Y.G.@CHX Get the ROI intensity of each frame Also get bad_frame_list by check whether above bad_pixel_threshold - Usuage: + Usage: imgsum, bad_frame_list = get_each_frame_intensity( data_pixel, bad_pixel_threshold=1e10, plot_ = True) """ # print ( argv, kwargs ) - imgsum = np.array([np.sum(img) for img in tqdm(data_series[::sampling], leave=True)]) + imgsum = np.array( + [np.sum(img) for img in tqdm(data_series[::sampling], leave=True)] + ) if plot_: uid = "uid" if "uid" in kwargs.keys(): @@ -250,7 +250,9 @@ def auto_two_Array(data, rois, data_pixel=None): sum1 = (np.average(data_pixel_qi, axis=1)).reshape(1, noframes) sum2 = sum1.T - g12b[:, :, qi - 1] = np.dot(data_pixel_qi, data_pixel_qi.T) / sum1 / sum2 / nopr[qi - 1] + g12b[:, :, qi - 1] = ( + np.dot(data_pixel_qi, data_pixel_qi.T) / sum1 / sum2 / nopr[qi - 1] + ) # print ( proi, int( qi //( Unitq) ) ) # if int( qi //( Unitq) ) == proi: # sys.stdout.write("#") @@ -361,7 +363,7 @@ def get_aged_g2_from_g12(g12, age_edge, age_center): """ Dec 16, 2015, Y.G.@CHX Get one-time correlation function of different age from two correlation function - namely, calculate the different aged mean of each diag line of g12 to get one-time correlation fucntion + namely, calculate the different aged mean of each diag line of g12 to get one-time correlation function Parameters: g12: a 3-D array, a two correlation function, shape as ( imgs_length, imgs_length, noqs ) @@ -400,7 +402,9 @@ def get_aged_g2_from_g12(g12, age_edge, age_center): return g2_aged -def get_aged_g2_from_g12q(g12q, age_edge, age_center=None, timeperframe=1, time_sampling="log", num_bufs=8): +def get_aged_g2_from_g12q( + g12q, age_edge, 
age_center=None, timeperframe=1, time_sampling="log", num_bufs=8 +): """ @@ -410,7 +414,7 @@ def get_aged_g2_from_g12q(g12q, age_edge, age_center=None, timeperframe=1, time_ Dec 16, 2015, Y.G.@CHX Revised at April 19, 2017 Get one-time correlation function of different age from 1q-two correlation function - namely, calculate the different aged mean of each diag line of g12 to get one-time correlation fucntion + namely, calculate the different aged mean of each diag line of g12 to get one-time correlation function Parameters: g12q: a 2-D array, one-q two correlation function, shape as ( imgs_length, imgs_length ) @@ -472,11 +476,13 @@ def get_aged_g2_from_g12q(g12q, age_edge, age_center=None, timeperframe=1, time_ return lag_dict, g2_aged -def get_aged_g2_from_g12q2(g12q, slice_num=6, slice_width=5, slice_start=0, slice_end=1): +def get_aged_g2_from_g12q2( + g12q, slice_num=6, slice_width=5, slice_start=0, slice_end=1 +): """ Dec 16, 2015, Y.G.@CHX Get one-time correlation function of different age from two correlation function - namely, calculate the different aged mean of each diag line of g12 to get one-time correlation fucntion + namely, calculate the different aged mean of each diag line of g12 to get one-time correlation function Parameters: g12q: a 2-D array, one-q two correlation function, shape as ( imgs_length, imgs_length ) @@ -501,7 +507,9 @@ def get_aged_g2_from_g12q2(g12q, slice_num=6, slice_width=5, slice_start=0, slic arr = rotate_g12q_to_rectangle(g12q) m, n = arr.shape # m should be 2*n-1 - age_edge, age_center = get_qedge(qstart=slice_start, qend=slice_end, qwidth=slice_width, noqs=slice_num) + age_edge, age_center = get_qedge( + qstart=slice_start, qend=slice_end, qwidth=slice_width, noqs=slice_num + ) age_edge, age_center = np.int_(age_edge), np.int_(age_center) # print (age_edge, age_center) g2_aged = {} @@ -560,7 +568,9 @@ def show_g12q_aged_g2( age_center = np.array(list(sorted(g2_aged.keys()))) print("the cut age centers are: " + 
str(age_center)) - age_center = np.int_(np.array(list(sorted(g2_aged.keys()))) / timeperframe) * 2 # in pixel + age_center = ( + np.int_(np.array(list(sorted(g2_aged.keys()))) / timeperframe) * 2 + ) # in pixel M, N = g12q.shape # fig, ax = plt.subplots( figsize = (8,8) ) @@ -743,7 +753,9 @@ def plot_aged_g2(g2_aged, tau=None, timeperframe=1, ylim=None, xlim=None): # get fout-time -def get_tau_from_g12q(g12q, slice_num=6, slice_width=1, slice_start=None, slice_end=None): +def get_tau_from_g12q( + g12q, slice_num=6, slice_width=1, slice_start=None, slice_end=None +): """ Dec 16, 2015, Y.G.@CHX Get tau lines from two correlation function @@ -773,7 +785,9 @@ def get_tau_from_g12q(g12q, slice_num=6, slice_width=1, slice_start=None, slice_ arr = rotate_g12q_to_rectangle(g12q) m, n = arr.shape # m should be 2*n-1 - age_edge, age_center = get_qedge(qstart=slice_start, qend=slice_end, qwidth=slice_width, noqs=slice_num) + age_edge, age_center = get_qedge( + qstart=slice_start, qend=slice_end, qwidth=slice_width, noqs=slice_num + ) age_edge, age_center = np.int_(age_edge), np.int_(age_center) # print (age_edge, age_center) tau = {} @@ -859,7 +873,9 @@ def show_g12q_taus(g12q, taus, slice_width=10, timeperframe=1, vmin=1, vmax=1.25 for i in sorted(taus.keys()): gx = np.arange(len(taus[i])) * timeperframe marker = next(markers) - ax1.plot(gx, taus[i], "-%s" % marker, label=r"$tau= %.1f s$" % (i * timeperframe)) + ax1.plot( + gx, taus[i], "-%s" % marker, label=r"$tau= %.1f s$" % (i * timeperframe) + ) ax1.set_ylim(vmin, vmax) ax1.set_xlabel(r"$t (s)$", fontsize=5) ax1.set_ylabel("g2") @@ -903,7 +919,7 @@ def histogram_taus(taus, hisbin=20, plot=True, timeperframe=1): if plot: fig, ax1 = plt.subplots(figsize=(8, 8)) - ax1.set_title("Tau_histgram") + ax1.set_title("Tau_histogram") for key in sorted(his.keys()): tx = 0.5 * (his[key][1][:-1] + his[key][1][1:]) marker = next(markers) @@ -916,7 +932,7 @@ def histogram_taus(taus, hisbin=20, plot=True, timeperframe=1): # ax1.set_ylim( 
1.05,1.35 ) ax1.set_xlim(1.05, 1.35) ax1.set_xlabel(r"$g_2$", fontsize=19) - ax1.set_ylabel(r"histgram of g2 @ tau", fontsize=15) + ax1.set_ylabel(r"histogram of g2 @ tau", fontsize=15) # ax1.set_xscale('log') ax1.legend(fontsize="large", loc="best") # plt.show() @@ -933,7 +949,7 @@ def get_one_time_from_two_time_old(g12, norms=None, nopr=None): """ Dec 16, 2015, Y.G.@CHX Get one-time correlation function from two correlation function - namely, calculate the mean of each diag line of g12 to get one-time correlation fucntion + namely, calculate the mean of each diag line of g12 to get one-time correlation function Parameters: g12: a 3-D array, two correlation function, shape as ( imgs_length, imgs_length, q) @@ -963,7 +979,9 @@ def get_one_time_from_two_time_old(g12, norms=None, nopr=None): yn = norms[:, q] yn1 = np.average(yn[tau:]) yn2 = np.average(yn[: m - tau]) - g2f12[tau, q] = np.nanmean(np.diag(y, k=int(tau))) / (yn1 * yn2 * nopr[q]) + g2f12[tau, q] = np.nanmean(np.diag(y, k=int(tau))) / ( + yn1 * yn2 * nopr[q] + ) return g2f12 @@ -972,7 +990,7 @@ def get_one_time_from_two_time(g12, norms=None, nopr=None): """ Dec 16, 2015, Y.G.@CHX Get one-time correlation function from two correlation function - namely, calculate the mean of each diag line of g12 to get one-time correlation fucntion + namely, calculate the mean of each diag line of g12 to get one-time correlation function Parameters: g12: a 3-D array, two correlation function, shape as ( imgs_length, imgs_length, q) @@ -1000,7 +1018,8 @@ def get_one_time_from_two_time(g12, norms=None, nopr=None): yn = norms[:, q] g2f12[i, q] = np.array( [ - np.nanmean(g12[:, :, q].diagonal(i)) / (np.average(yn[i:]) * np.average(yn[: m - i]) * nopr[q]) + np.nanmean(g12[:, :, q].diagonal(i)) + / (np.average(yn[i:]) * np.average(yn[: m - i]) * nopr[q]) for i in range(m) ] ) @@ -1011,7 +1030,7 @@ def get_four_time_from_two_time(g12, g2=None, rois=None): """ Dec 16, 2015, Y.G.@CHX Get four-time correlation function from two 
correlation function - namely, calculate the deviation of each diag line of g12 to get four-time correlation fucntion + namely, calculate the deviation of each diag line of g12 to get four-time correlation function TOBEDONE: deal with bad frames Parameters: @@ -1019,7 +1038,7 @@ def get_four_time_from_two_time(g12, g2=None, rois=None): Options: g2: if not None, a 2-D array, shape as ( imgs_length, q), or (tau, q) - one-time correlation fucntion, for normalization of the four-time + one-time correlation function, for normalization of the four-time rois: if not None, a list, [x-slice-start, x-slice-end, y-slice-start, y-slice-end] Return: @@ -1037,11 +1056,18 @@ def get_four_time_from_two_time(g12, g2=None, rois=None): else: norm = 1.0 if rois is None: - g4f12 = np.array([(np.nanstd(g12.diagonal(i), axis=1)) ** 2 / norm for i in range(m)]) + g4f12 = np.array( + [(np.nanstd(g12.diagonal(i), axis=1)) ** 2 / norm for i in range(m)] + ) else: x1, x2, y1, y2 = rois - g4f12 = np.array([(np.nanstd(g12[x1:x2, y1:y2, :].diagonal(i), axis=1)) ** 2 / norm for i in range(m)]) + g4f12 = np.array( + [ + (np.nanstd(g12[x1:x2, y1:y2, :].diagonal(i), axis=1)) ** 2 / norm + for i in range(m) + ] + ) return g4f12 @@ -1306,7 +1332,9 @@ def show_C12( fig, ax = fig_ax # extent=[0, data.shape[0]*timeperframe, 0, data.shape[0]*timeperframe ] - extent = np.array([N1, N2, N1, N2]) * timeperframe + timeoffset ### added timeoffset to extend + extent = ( + np.array([N1, N2, N1, N2]) * timeperframe + timeoffset + ) ### added timeoffset to extend if logs: im = imshow( diff --git a/pyCHX/v2/_futurepyCHX/XPCS_GiSAXS.py b/pyCHX/v2/_futurepyCHX/XPCS_GiSAXS.py index 8c57ff8..6f8269c 100644 --- a/pyCHX/v2/_futurepyCHX/XPCS_GiSAXS.py +++ b/pyCHX/v2/_futurepyCHX/XPCS_GiSAXS.py @@ -4,18 +4,19 @@ This module is for the GiSAXS XPCS analysis """ -from skbeam.core.accumulators.binned_statistic import BinnedStatistic1D, BinnedStatistic2D +from skbeam.core.accumulators.binned_statistic import ( + 
BinnedStatistic1D, + BinnedStatistic2D, +) from pyCHX.chx_compress import ( Multifile, compress_eigerdata, get_avg_imgc, - init_compress_eigerdata, - read_compressed_eigerdata, ) from pyCHX.chx_correlationc import cal_g2c from pyCHX.chx_generic_functions import * -from pyCHX.chx_libs import colors, colors_, markers, markers_ +from pyCHX.chx_libs import colors, markers def get_gisaxs_roi2(qr_edge, qz_edge, qr_map, qz_map, mask=None, qval_dict=None): @@ -23,9 +24,9 @@ def get_gisaxs_roi2(qr_edge, qz_edge, qr_map, qz_map, mask=None, qval_dict=None) Get xpcs roi of gisaxs by giving Qr centers/edges, Qz centers/edges Parameters: qr_edge: list, e.g., [ [0.01,0.02], [0.03,0.04] ]. - each elment has two values for the start and end of one qr edge + each element has two values for the start and end of one qr edge qz_edge: list, e.g., [ [0.01,0.02], [0.03,0.04] ] - each elment has two values for the start and end of one qz edge + each element has two values for the start and end of one qz edge qr_map: two-d array, the same shape as gisaxs frame, a qr map qz_map: two-d array, the same shape as gisaxs frame, a qz map mask: array, the scattering mask @@ -44,14 +45,18 @@ def get_gisaxs_roi2(qr_edge, qz_edge, qr_map, qz_map, mask=None, qval_dict=None) qz_center = 0.5 * (qz_edge[:, 0] + qz_edge[:, 1]) label_array_qz = get_qmap_label(qz_map, qz_edge) label_array_qr = get_qmap_label(qr_map, qr_edge) - label_array_qzr, qzc, qrc = get_qzrmap(label_array_qz, label_array_qr, qz_center, qr_center) + label_array_qzr, qzc, qrc = get_qzrmap( + label_array_qz, label_array_qr, qz_center, qr_center + ) labels_qzr, indices_qzr = roi.extract_label_indices(label_array_qzr) labels_qz, indices_qz = roi.extract_label_indices(label_array_qz) labels_qr, indices_qr = roi.extract_label_indices(label_array_qr) if mask is None: mask = 1 roi_mask = label_array_qzr * mask - qval_dict = get_qval_dict(np.round(qr_center, 5), np.round(qz_center, 5), qval_dict=qval_dict) + qval_dict = get_qval_dict( + 
np.round(qr_center, 5), np.round(qz_center, 5), qval_dict=qval_dict + ) return roi_mask, qval_dict @@ -76,14 +81,18 @@ def get_gisaxs_roi(Qr, Qz, qr_map, qz_map, mask=None, qval_dict=None): qz_edge, qz_center = get_qedge(*Qz) label_array_qz = get_qmap_label(qz_map, qz_edge) label_array_qr = get_qmap_label(qr_map, qr_edge) - label_array_qzr, qzc, qrc = get_qzrmap(label_array_qz, label_array_qr, qz_center, qr_center) + label_array_qzr, qzc, qrc = get_qzrmap( + label_array_qz, label_array_qr, qz_center, qr_center + ) labels_qzr, indices_qzr = roi.extract_label_indices(label_array_qzr) labels_qz, indices_qz = roi.extract_label_indices(label_array_qz) labels_qr, indices_qr = roi.extract_label_indices(label_array_qr) if mask is None: mask = 1 roi_mask = label_array_qzr * mask - qval_dict = get_qval_dict(np.round(qr_center, 5), np.round(qz_center, 5), qval_dict=qval_dict) + qval_dict = get_qval_dict( + np.round(qr_center, 5), np.round(qz_center, 5), qval_dict=qval_dict + ) return roi_mask, qval_dict @@ -116,7 +125,7 @@ def get_qr(data, Qr, Qz, qr, qz, mask=None): Qz= [qz_start, qz_end, qz_width , qz_num ] new_mask[ :, 1020:1045] =0 ticks = show_qzr_map( qr,qz, inc_x0, data = avg_imgmr, Nzline=10, Nrline=10 ) - qx, qy, qr, qz = convert_gisaxs_pixel_to_q( inc_x0, inc_y0,refl_x0,refl_y0, lamda=lamda, Lsd=Lsd ) + qx, qy, qr, qz = convert_gisaxs_pixel_to_q( inc_x0, inc_y0,refl_x0,refl_y0, lambda=lambda, Lsd=Lsd ) qr_1d = get_qr( avg_imgr, Qr, Qz, qr, qz, new_mask) """ @@ -133,7 +142,9 @@ def get_qr(data, Qr, Qz, qr, qz, mask=None): # print (i,qzc_) label_array_qz = get_qmap_label(qz, qz_edge[i * 2 : 2 * i + 2]) # print (qzc_, qz_edge[i*2:2*i+2]) - label_array_qzr, qzc, qrc = get_qzrmap(label_array_qz, label_array_qr, qz_center, qr_center) + label_array_qzr, qzc, qrc = get_qzrmap( + label_array_qz, label_array_qr, qz_center, qr_center + ) # print (np.unique(label_array_qzr )) if mask is not None: label_array_qzr *= mask @@ -142,7 +153,9 @@ def get_qr(data, Qr, Qz, qr, qz, 
mask=None): data_ = data * label_array_qzr qr_ave = np.sum(qr_, axis=0) / roi_pixel_num data_ave = np.sum(data_, axis=0) / roi_pixel_num - qr_ave, data_ave = zip(*sorted(zip(*[qr_ave[~np.isnan(qr_ave)], data_ave[~np.isnan(data_ave)]]))) + qr_ave, data_ave = zip( + *sorted(zip(*[qr_ave[~np.isnan(qr_ave)], data_ave[~np.isnan(data_ave)]])) + ) if i == 0: N_interp = len(qr_ave) @@ -150,9 +163,13 @@ def get_qr(data, Qr, Qz, qr, qz, mask=None): data_ave = np.interp(qr_ave_intp, qr_ave, data_ave) # columns.append( ['qr%s'%i, str(round(qzc_,4))] ) if i == 0: - df = np.hstack([(qr_ave_intp).reshape(N_interp, 1), data_ave.reshape(N_interp, 1)]) + df = np.hstack( + [(qr_ave_intp).reshape(N_interp, 1), data_ave.reshape(N_interp, 1)] + ) else: - df = np.hstack([df, (qr_ave_intp).reshape(N_interp, 1), data_ave.reshape(N_interp, 1)]) + df = np.hstack( + [df, (qr_ave_intp).reshape(N_interp, 1), data_ave.reshape(N_interp, 1)] + ) # df = DataFrame( df ) # df.columns = np.concatenate( columns ) @@ -182,7 +199,7 @@ def cal_1d_qr( Dec 16, 2016, Y.G.@CHX calculate one-d of I(q) as a function of qr for different qz data: a dataframe - Qr: info for qr, = qr_start , qr_end, qr_width, qr_num, the purpose of Qr is only for the defination of qr range (qr number does not matter) + Qr: info for qr, = qr_start , qr_end, qr_width, qr_num, the purpose of Qr is only for the definition of qr range (qr number does not matter) Qz: info for qz, = qz_start, qz_end, qz_width , qz_num qr: qr-map qz: qz-map @@ -210,7 +227,7 @@ def cal_1d_qr( Qr = [qr_start , qr_end, qr_width, qr_num] Qz= [qz_start, qz_end, qz_width , qz_num ] new_mask[ :, 1020:1045] =0 - qx, qy, qr, qz = convert_gisaxs_pixel_to_q( inc_x0, inc_y0,refl_x0,refl_y0, lamda=lamda, Lsd=Lsd ) + qx, qy, qr, qz = convert_gisaxs_pixel_to_q( inc_x0, inc_y0,refl_x0,refl_y0, lambda=lambda, Lsd=Lsd ) qr_1d = get_1d_qr( avg_imgr, Qr, Qz, qr, qz, inc_x0, new_mask) @@ -232,7 +249,9 @@ def cal_1d_qr( # print (i,qzc_) label_array_qz = get_qmap_label(qz, 
qz_edge[i * 2 : 2 * i + 2]) # print (qzc_, qz_edge[i*2:2*i+2]) - label_array_qzr, qzc, qrc = get_qzrmap(label_array_qz, label_array_qr, qz_center, qr_center) + label_array_qzr, qzc, qrc = get_qzrmap( + label_array_qz, label_array_qr, qz_center, qr_center + ) # print (np.unique(label_array_qzr )) if mask is not None: label_array_qzr *= mask @@ -247,7 +266,9 @@ def cal_1d_qr( qr_ave = (np.sum(qr_, axis=0))[w] / roi_pixel_num[w] data_ave = (np.sum(data_, axis=0))[w] / roi_pixel_num[w] - qr_ave, data_ave = zip(*sorted(zip(*[qr_ave[~np.isnan(qr_ave)], data_ave[~np.isnan(data_ave)]]))) + qr_ave, data_ave = zip( + *sorted(zip(*[qr_ave[~np.isnan(qr_ave)], data_ave[~np.isnan(data_ave)]])) + ) if i == 0: N_interp = len(qr_ave) columns.append(["qr"]) @@ -257,7 +278,9 @@ def cal_1d_qr( # qr_1d[i]= [qr_ave_intp, data_ave] columns.append(["qz%s=%s" % (i, str(round(qzc_, 4)))]) if i == 0: - df = np.hstack([(qr_ave_intp).reshape(N_interp, 1), data_ave.reshape(N_interp, 1)]) + df = np.hstack( + [(qr_ave_intp).reshape(N_interp, 1), data_ave.reshape(N_interp, 1)] + ) else: df = np.hstack([df, data_ave.reshape(N_interp, 1)]) df = DataFrame(df) @@ -271,11 +294,26 @@ def cal_1d_qr( filename = os.path.join(path, "%s_qr_1d.csv" % (uid)) df.to_csv(filename) if print_save_message: - print("The qr_1d is saved in %s with filename as %s_qr_1d.csv" % (path, uid)) + print( + "The qr_1d is saved in %s with filename as %s_qr_1d.csv" % (path, uid) + ) return df -def get_t_qrc(FD, frame_edge, Qr, Qz, qr, qz, mask=None, path=None, uid=None, save=True, *argv, **kwargs): +def get_t_qrc( + FD, + frame_edge, + Qr, + Qz, + qr, + qz, + mask=None, + path=None, + uid=None, + save=True, + *argv, + **kwargs, +): """Get t-dependent qr Parameters @@ -327,11 +365,15 @@ def get_t_qrc(FD, frame_edge, Qr, Qz, qr, qz, mask=None, path=None, uid=None, sa uid = setup_pargs["uid"] filename = os.path.join(path, "%s_qrt_pds.csv" % (uid)) qrt_pds.to_csv(filename) - print("The qr~time is saved in %s with filename as 
%s_qrt_pds.csv" % (path, uid)) + print( + "The qr~time is saved in %s with filename as %s_qrt_pds.csv" % (path, uid) + ) return qrt_pds -def plot_qrt_pds(qrt_pds, frame_edge, qz_index=0, uid="uid", path="", fontsize=8, *argv, **kwargs): +def plot_qrt_pds( + qrt_pds, frame_edge, qz_index=0, uid="uid", path="", fontsize=8, *argv, **kwargs +): """Y.G. Jan 04, 2017 plot t-dependent qr @@ -388,7 +430,7 @@ def plot_t_qrc(qr_1d, frame_edge, save=False, pargs=None, fontsize=8, *argv, **k qr_1d: array, with shape as time length, frame_edge frame_edge: list, the ROI frame regions, e.g., [ [0,100], [200,400] ] save: save the plot - if save, all the following paramters are given in argv + if save, all the following parameters are given in argv { 'path': 'uid': } @@ -458,7 +500,9 @@ def make_gisaxs_grid(qr_w=10, qz_w=12, dim_r=100, dim_z=120): ########################################### -def convert_Qmap(img, qx_map, qy_map=None, bins=None, rangeq=None, mask=None, statistic="sum"): +def convert_Qmap( + img, qx_map, qy_map=None, bins=None, rangeq=None, mask=None, statistic="sum" +): """Y.G. 
Nov 3@CHX Convert a scattering image to a qmap by giving qx_map and qy_map Return converted qmap, x-coordinates and y-coordinates @@ -475,9 +519,18 @@ def convert_Qmap(img, qx_map, qy_map=None, bins=None, rangeq=None, mask=None, st else: m = None b2d = BinnedStatistic2D( - qx_map.ravel(), qy_map.ravel(), statistic=statistic, bins=bins, mask=m, range=rangeq + qx_map.ravel(), + qy_map.ravel(), + statistic=statistic, + bins=bins, + mask=m, + range=rangeq, + ) + remesh_data, xbins, ybins = ( + b2d(img.ravel()), + b2d.bin_centers[0], + b2d.bin_centers[1], ) - remesh_data, xbins, ybins = b2d(img.ravel()), b2d.bin_centers[0], b2d.bin_centers[1] else: if rangeq is None: qx_min, qx_max = qx_map.min(), qx_map.max() @@ -515,7 +568,14 @@ def get_refl_xy(inc_ang, inc_phi, inc_x0, inc_y0, pixelsize=[0.075, 0.075], Lsd= def get_alphaf_thetaf( - inc_x0, inc_y0, inc_ang, inc_phi=0, pixelsize=[0.075, 0.075], Lsd=5000, dimx=2070.0, dimy=2167.0 + inc_x0, + inc_y0, + inc_ang, + inc_phi=0, + pixelsize=[0.075, 0.075], + Lsd=5000, + dimx=2070.0, + dimy=2167.0, ): """Nov 19, 2018@SMI to get alphaf and thetaf for gi scattering Input: @@ -543,7 +603,7 @@ def convert_gisaxs_pixel_to_q2( alphaf, thetaf, phi=0, - lamda=1.0, + lambda=1.0, thetai=0.0, ): """ @@ -558,7 +618,7 @@ def convert_gisaxs_pixel_to_q2( get: q_parallel (qp), q_direction_z (qz) """ - pref = 2 * np.pi / lamda + pref = 2 * np.pi / lambda alphai = np.radians(inc_ang) thetai = np.radians(thetai) phi = np.radians(phi) @@ -585,7 +645,9 @@ def get_incident_angles(inc_x0, inc_y0, refl_x0, refl_y0, pixelsize=[75, 75], Ls Lsd = Lsd / 1000.0 px, py = pixelsize - phi = np.arctan2((-refl_x0 + inc_x0) * px * 10 ** (-6), (refl_y0 - inc_y0) * py * 10 ** (-6)) + phi = np.arctan2( + (-refl_x0 + inc_x0) * px * 10 ** (-6), (refl_y0 - inc_y0) * py * 10 ** (-6) + ) alphai = np.arctan2((refl_y0 - inc_y0) * py * 10 ** (-6), Lsd) / 2.0 # thetai = np.arctan2( (rcenx - bcenx)*px *10**(-6), Lsd ) /2. #?? 
@@ -593,7 +655,15 @@ def get_incident_angles(inc_x0, inc_y0, refl_x0, refl_y0, pixelsize=[75, 75], Ls def get_reflected_angles( - inc_x0, inc_y0, refl_x0, refl_y0, thetai=0.0, pixelsize=[75, 75], Lsd=5.0, dimx=2070.0, dimy=2167.0 + inc_x0, + inc_y0, + refl_x0, + refl_y0, + thetai=0.0, + pixelsize=[75, 75], + Lsd=5.0, + dimx=2070.0, + dimy=2167.0, ): """Dec 16, 2015, Y.G.@CHX giving: incident beam center: bcenx,bceny @@ -619,7 +689,16 @@ def get_reflected_angles( def convert_gisaxs_pixel_to_q( - inc_x0, inc_y0, refl_x0, refl_y0, pixelsize=[75, 75], Lsd=5.0, dimx=2070.0, dimy=2167.0, thetai=0.0, lamda=1.0 + inc_x0, + inc_y0, + refl_x0, + refl_y0, + pixelsize=[75, 75], + Lsd=5.0, + dimx=2070.0, + dimy=2167.0, + thetai=0.0, + lambda=1.0, ): """ Dec 16, 2015, Y.G.@CHX @@ -636,7 +715,7 @@ def convert_gisaxs_pixel_to_q( alphaf, thetaf, alphai, phi = get_reflected_angles( inc_x0, inc_y0, refl_x0, refl_y0, thetai, pixelsize, Lsd, dimx, dimy ) - pref = 2 * np.pi / lamda + pref = 2 * np.pi / lambda qx = np.cos(alphaf) * np.cos(2 * thetaf) - np.cos(alphai) * np.cos(2 * thetai) qy_ = np.cos(alphaf) * np.sin(2 * thetaf) - np.cos(alphai) * np.sin(2 * thetai) qz_ = np.sin(alphaf) + np.sin(alphai) @@ -655,7 +734,6 @@ def get_qedge(qstart, qend, qwidth, noqs, verbose=True): return a qedge by giving the noqs, qstart,qend,qwidth. 
a qcenter, which is center of each qedge KEYWORD: None""" - import numpy as np if noqs != 1: spacing = (qend - qstart - noqs * qwidth) / (noqs - 1) # spacing between rings @@ -735,13 +813,23 @@ def get_qzrmap(label_array_qz, label_array_qr, qz_center, qr_center): for i, label in enumerate(uqzr): # print (i, label) - label_array_qzr_.ravel()[np.where(label_array_qzr.ravel() == label)[0]] = newl[i] + label_array_qzr_.ravel()[np.where(label_array_qzr.ravel() == label)[0]] = newl[ + i + ] return np.int_(label_array_qzr_), np.array(qzc), np.concatenate(np.array(qrc)) def show_label_array_on_image( - ax, image, label_array, cmap=None, norm=None, log_img=True, alpha=0.3, imshow_cmap="gray", **kwargs + ax, + image, + label_array, + cmap=None, + norm=None, + log_img=True, + alpha=0.3, + imshow_cmap="gray", + **kwargs, ): # norm=LogNorm(), """ This will plot the required ROI's(labeled array) on the image @@ -771,9 +859,13 @@ def show_label_array_on_image( """ ax.set_aspect("equal") if log_img: - im = ax.imshow(image, cmap=imshow_cmap, interpolation="none", norm=LogNorm(norm), **kwargs) # norm=norm, + im = ax.imshow( + image, cmap=imshow_cmap, interpolation="none", norm=LogNorm(norm), **kwargs + ) # norm=norm, else: - im = ax.imshow(image, cmap=imshow_cmap, interpolation="none", norm=norm, **kwargs) # norm=norm, + im = ax.imshow( + image, cmap=imshow_cmap, interpolation="none", norm=norm, **kwargs + ) # norm=norm, im_label = mpl_plot.show_label_array( ax, label_array, cmap=cmap, norm=norm, alpha=alpha, **kwargs @@ -784,7 +876,7 @@ def show_label_array_on_image( def show_qz(qz): """Dec 16, 2015, Y.G.@CHX - plot qz mape + plot qz map """ @@ -797,7 +889,7 @@ def show_qz(qz): def show_qr(qr): """Dec 16, 2015, Y.G.@CHX - plot qr mape + plot qr map """ fig, ax = plt.subplots() @@ -811,12 +903,14 @@ def show_alphaf( alphaf, ): """Dec 16, 2015, Y.G.@CHX - plot alphaf mape + plot alphaf map """ fig, ax = plt.subplots() - im = ax.imshow(alphaf * 180 / np.pi, origin="lower", 
cmap="viridis", vmin=-1, vmax=1.5) + im = ax.imshow( + alphaf * 180 / np.pi, origin="lower", cmap="viridis", vmin=-1, vmax=1.5 + ) # im=ax.imshow(alphaf, origin='lower' ,cmap='viridis',norm= LogNorm(vmin=0.0001,vmax=2.00)) fig.colorbar(im) ax.set_title("alphaf") @@ -877,7 +971,7 @@ def get_1d_qr( Qz= [qz_start, qz_end, qz_width , qz_num ] new_mask[ :, 1020:1045] =0 ticks = show_qzr_map( qr,qz, inc_x0, data = avg_imgmr, Nzline=10, Nrline=10 ) - qx, qy, qr, qz = convert_gisaxs_pixel_to_q( inc_x0, inc_y0,refl_x0,refl_y0, lamda=lamda, Lsd=Lsd ) + qx, qy, qr, qz = convert_gisaxs_pixel_to_q( inc_x0, inc_y0,refl_x0,refl_y0, lambda=lambda, Lsd=Lsd ) qr_1d = get_1d_qr( avg_imgr, Qr, Qz, qr, qz, inc_x0, new_mask, True, ticks, .8) @@ -898,7 +992,9 @@ def get_1d_qr( if show_roi: label_array_qz0 = get_qmap_label(qz, qz_edge) - label_array_qzr0, qzc0, qrc0 = get_qzrmap(label_array_qz0, label_array_qr, qz_center, qr_center) + label_array_qzr0, qzc0, qrc0 = get_qzrmap( + label_array_qz0, label_array_qr, qz_center, qr_center + ) if mask is not None: label_array_qzr0 *= mask @@ -912,7 +1008,9 @@ def get_1d_qr( # print (i,qzc_) label_array_qz = get_qmap_label(qz, qz_edge[i * 2 : 2 * i + 2]) # print (qzc_, qz_edge[i*2:2*i+2]) - label_array_qzr, qzc, qrc = get_qzrmap(label_array_qz, label_array_qr, qz_center, qr_center) + label_array_qzr, qzc, qrc = get_qzrmap( + label_array_qz, label_array_qr, qz_center, qr_center + ) # print (np.unique(label_array_qzr )) if mask is not None: label_array_qzr *= mask @@ -922,7 +1020,9 @@ def get_1d_qr( qr_ave = np.sum(qr_, axis=0) / roi_pixel_num data_ave = np.sum(data_, axis=0) / roi_pixel_num - qr_ave, data_ave = zip(*sorted(zip(*[qr_ave[~np.isnan(qr_ave)], data_ave[~np.isnan(data_ave)]]))) + qr_ave, data_ave = zip( + *sorted(zip(*[qr_ave[~np.isnan(qr_ave)], data_ave[~np.isnan(data_ave)]])) + ) if i == 0: N_interp = len(qr_ave) @@ -937,9 +1037,13 @@ def get_1d_qr( else: ax.plot(qr_ave_intp, data_ave, "--o", label="qz= %f" % qzc_) if i == 0: - df = 
np.hstack([(qr_ave_intp).reshape(N_interp, 1), data_ave.reshape(N_interp, 1)]) + df = np.hstack( + [(qr_ave_intp).reshape(N_interp, 1), data_ave.reshape(N_interp, 1)] + ) else: - df = np.hstack([df, (qr_ave_intp).reshape(N_interp, 1), data_ave.reshape(N_interp, 1)]) + df = np.hstack( + [df, (qr_ave_intp).reshape(N_interp, 1), data_ave.reshape(N_interp, 1)] + ) # ax.set_xlabel( r'$q_r$', fontsize=15) ax.set_xlabel(r"$q_r$" r"($\AA^{-1}$)", fontsize=18) @@ -960,7 +1064,9 @@ def get_1d_qr( # filename = os.path.join(path, 'qr_1d-%s-%s.csv' % (uid,CurTime)) filename = os.path.join(path, "uid=%s--qr_1d.csv" % (uid)) df.to_csv(filename) - print("The qr_1d is saved in %s with filename as uid=%s--qr_1d.csv" % (path, uid)) + print( + "The qr_1d is saved in %s with filename as uid=%s--qr_1d.csv" % (path, uid) + ) # fp = path + 'Uid= %s--Circular Average'%uid + CurTime + '.png' fp = path + "uid=%s--qr_1d-" % uid + ".png" @@ -1082,13 +1188,17 @@ def get_qr_tick_label(qr, label_array_qr, inc_x0, interp=True): rticks_label = np.array(rticks_label) try: w = np.where(rticks <= inc_x0)[0] - rticks1 = np.int_(np.interp(np.round(rticks_label[w], 3), rticks_label[w], rticks[w])) + rticks1 = np.int_( + np.interp(np.round(rticks_label[w], 3), rticks_label[w], rticks[w]) + ) rticks_label1 = np.round(rticks_label[w], 3) except: rticks_label1 = [] try: w = np.where(rticks > inc_x0)[0] - rticks2 = np.int_(np.interp(np.round(rticks_label[w], 3), rticks_label[w], rticks[w])) + rticks2 = np.int_( + np.interp(np.round(rticks_label[w], 3), rticks_label[w], rticks[w]) + ) rticks = np.append(rticks1, rticks2) rticks_label2 = np.round(rticks_label[w], 3) except: @@ -1134,7 +1244,17 @@ def get_qz_tick_label(qz, label_array_qz, interp=True): return zticks, zticks_label -def get_qzr_map(qr, qz, inc_x0, Nzline=10, Nrline=10, interp=True, return_qrz_label=True, *argv, **kwargs): +def get_qzr_map( + qr, + qz, + inc_x0, + Nzline=10, + Nrline=10, + interp=True, + return_qrz_label=True, + *argv, + **kwargs, 
+): """ Dec 31, 2016, Y.G.@CHX Calculate a qzr map of a gisaxs image (data) without plot @@ -1154,16 +1274,20 @@ def get_qzr_map(qr, qz, inc_x0, Nzline=10, Nrline=10, interp=True, return_qrz_la rticks: list, r-tick positions in unit of pixel rticks_label: list, r-tick positions in unit of real space else: return the additional two below - label_array_qr: qr label array with the same shpae as gisaxs image - label_array_qz: qz label array with the same shpae as gisaxs image + label_array_qr: qr label array with the same shape as gisaxs image + label_array_qz: qz label array with the same shape as gisaxs image Examples: ticks = get_qzr_map( qr, qz, inc_x0 ) """ qr_start, qr_end, qr_num = qr.min(), qr.max(), Nrline qz_start, qz_end, qz_num = qz.min(), qz.max(), Nzline - qr_edge, qr_center = get_qedge(qr_start, qr_end, (qr_end - qr_start) / (qr_num + 100), qr_num) - qz_edge, qz_center = get_qedge(qz_start, qz_end, (qz_end - qz_start) / (qz_num + 100), qz_num) + qr_edge, qr_center = get_qedge( + qr_start, qr_end, (qr_end - qr_start) / (qr_num + 100), qr_num + ) + qz_edge, qz_center = get_qedge( + qz_start, qz_end, (qz_end - qz_start) / (qz_num + 100), qz_num + ) label_array_qz = get_qmap_label(qz, qz_edge) label_array_qr = get_qmap_label(qr, qr_edge) @@ -1175,18 +1299,41 @@ def get_qzr_map(qr, qz, inc_x0, Nzline=10, Nrline=10, interp=True, return_qrz_la zticks, zticks_label = get_qz_tick_label(qz, label_array_qz) # rticks,rticks_label = get_qr_tick_label(label_array_qr,inc_x0) try: - rticks, rticks_label = zip(*np.sort(zip(*get_qr_tick_label(qr, label_array_qr, inc_x0, interp=interp)))) + rticks, rticks_label = zip( + *np.sort(zip(*get_qr_tick_label(qr, label_array_qr, inc_x0, interp=interp))) + ) except: - rticks, rticks_label = zip(*sorted(zip(*get_qr_tick_label(qr, label_array_qr, inc_x0, interp=interp)))) + rticks, rticks_label = zip( + *sorted(zip(*get_qr_tick_label(qr, label_array_qr, inc_x0, interp=interp))) + ) # stride = int(len(zticks)/10) ticks = [zticks, 
zticks_label, rticks, rticks_label] if return_qrz_label: - return zticks, zticks_label, rticks, rticks_label, label_array_qr, label_array_qz + return ( + zticks, + zticks_label, + rticks, + rticks_label, + label_array_qr, + label_array_qz, + ) else: return zticks, zticks_label, rticks, rticks_label -def plot_qzr_map(qr, qz, inc_x0, ticks=None, data=None, uid="uid", path="", vmin=0.001, vmax=1e1, *argv, **kwargs): +def plot_qzr_map( + qr, + qz, + inc_x0, + ticks=None, + data=None, + uid="uid", + path="", + vmin=0.001, + vmax=1e1, + *argv, + **kwargs, +): """ Dec 31, 2016, Y.G.@CHX plot a qzr map of a gisaxs image (data) @@ -1201,8 +1348,8 @@ def plot_qzr_map(qr, qz, inc_x0, ticks=None, data=None, uid="uid", path="", vmin zticks_label: list, z-tick positions in unit of real space rticks: list, r-tick positions in unit of pixel rticks_label: list, r-tick positions in unit of real space - label_array_qr: qr label array with the same shpae as gisaxs image - label_array_qz: qz label array with the same shpae as gisaxs image + label_array_qr: qr label array with the same shape as gisaxs image + label_array_qz: qz label array with the same shape as gisaxs image inc_x0: the incident beam center x Options: @@ -1225,11 +1372,13 @@ def plot_qzr_map(qr, qz, inc_x0, ticks=None, data=None, uid="uid", path="", vmin import matplotlib.pyplot as plt if ticks is None: - zticks, zticks_label, rticks, rticks_label, label_array_qr, label_array_qz = get_qzr_map( - qr, qz, inc_x0, return_qrz_label=True + zticks, zticks_label, rticks, rticks_label, label_array_qr, label_array_qz = ( + get_qzr_map(qr, qz, inc_x0, return_qrz_label=True) ) else: - zticks, zticks_label, rticks, rticks_label, label_array_qr, label_array_qz = ticks + zticks, zticks_label, rticks, rticks_label, label_array_qr, label_array_qz = ( + ticks + ) cmap = "viridis" _cmap = copy.copy((mcm.get_cmap(cmap))) @@ -1239,7 +1388,9 @@ def plot_qzr_map(qr, qz, inc_x0, ticks=None, data=None, uid="uid", path="", vmin data = qr + qz 
im = ax.imshow(data, cmap="viridis", origin="lower") else: - im = ax.imshow(data, cmap="viridis", origin="lower", norm=LogNorm(vmin=vmin, vmax=vmax)) + im = ax.imshow( + data, cmap="viridis", origin="lower", norm=LogNorm(vmin=vmin, vmax=vmax) + ) imr = ax.imshow( label_array_qr, origin="lower", cmap="viridis", vmin=0.5, vmax=None @@ -1268,7 +1419,9 @@ def plot_qzr_map(qr, qz, inc_x0, ticks=None, data=None, uid="uid", path="", vmin fig.savefig(fp, dpi=fig.dpi) -def show_qzr_map(qr, qz, inc_x0, data=None, Nzline=10, Nrline=10, interp=True, *argv, **kwargs): +def show_qzr_map( + qr, qz, inc_x0, data=None, Nzline=10, Nrline=10, interp=True, *argv, **kwargs +): """ Dec 16, 2015, Y.G.@CHX plot a qzr map of a gisaxs image (data) @@ -1308,8 +1461,12 @@ def show_qzr_map(qr, qz, inc_x0, data=None, Nzline=10, Nrline=10, interp=True, * qr_start, qr_end, qr_num = qr.min(), qr.max(), Nrline qz_start, qz_end, qz_num = qz.min(), qz.max(), Nzline - qr_edge, qr_center = get_qedge(qr_start, qr_end, (qr_end - qr_start) / (qr_num + 100), qr_num) - qz_edge, qz_center = get_qedge(qz_start, qz_end, (qz_end - qz_start) / (qz_num + 100), qz_num) + qr_edge, qr_center = get_qedge( + qr_start, qr_end, (qr_end - qr_start) / (qr_num + 100), qr_num + ) + qz_edge, qz_center = get_qedge( + qz_start, qz_end, (qz_end - qz_start) / (qz_num + 100), qz_num + ) label_array_qz = get_qmap_label(qz, qz_edge) label_array_qr = get_qmap_label(qr, qr_edge) @@ -1325,7 +1482,9 @@ def show_qzr_map(qr, qz, inc_x0, data=None, Nzline=10, Nrline=10, interp=True, * data = qr + qz im = ax.imshow(data, cmap="viridis", origin="lower") else: - im = ax.imshow(data, cmap="viridis", origin="lower", norm=LogNorm(vmin=0.001, vmax=1e1)) + im = ax.imshow( + data, cmap="viridis", origin="lower", norm=LogNorm(vmin=0.001, vmax=1e1) + ) imr = ax.imshow( label_array_qr, origin="lower", cmap="viridis", vmin=0.5, vmax=None @@ -1334,7 +1493,7 @@ def show_qzr_map(qr, qz, inc_x0, data=None, Nzline=10, Nrline=10, interp=True, * 
label_array_qz, origin="lower", cmap="viridis", vmin=0.5, vmax=None ) # ,interpolation='nearest',) - # caxr = fig.add_axes([0.88, 0.2, 0.03, .7]) #x,y, width, heigth + # caxr = fig.add_axes([0.88, 0.2, 0.03, .7]) #x,y, width, height # cba = fig.colorbar(im, cax=caxr ) # cba = fig.colorbar(im, fraction=0.046, pad=0.04) @@ -1351,9 +1510,13 @@ def show_qzr_map(qr, qz, inc_x0, data=None, Nzline=10, Nrline=10, interp=True, * zticks, zticks_label = get_qz_tick_label(qz, label_array_qz) # rticks,rticks_label = get_qr_tick_label(label_array_qr,inc_x0) try: - rticks, rticks_label = zip(*np.sort(zip(*get_qr_tick_label(qr, label_array_qr, inc_x0, interp=interp)))) + rticks, rticks_label = zip( + *np.sort(zip(*get_qr_tick_label(qr, label_array_qr, inc_x0, interp=interp))) + ) except: - rticks, rticks_label = zip(*sorted(zip(*get_qr_tick_label(qr, label_array_qr, inc_x0, interp=interp)))) + rticks, rticks_label = zip( + *sorted(zip(*get_qr_tick_label(qr, label_array_qr, inc_x0, interp=interp))) + ) # stride = int(len(zticks)/10) stride = 1 @@ -1400,7 +1563,7 @@ def show_qzr_roi( save=False, return_fig=False, *argv, - **kwargs + **kwargs, ): """ Dec 16, 2015, Y.G.@CHX @@ -1648,7 +1811,9 @@ def plot_gisaxs_g2(g2, taus, res_pargs=None, one_plot=False, *argv, **kwargs): # plot g2 results -def plot_gisaxs_two_g2(g2, taus, g2b, tausb, res_pargs=None, one_plot=False, *argv, **kwargs): +def plot_gisaxs_two_g2( + g2, taus, g2b, tausb, res_pargs=None, one_plot=False, *argv, **kwargs +): """Dec 16, 2015, Y.G.@CHX plot g2 results, g2: one-time correlation function from a multi-tau method @@ -1804,7 +1969,9 @@ def plot_gisaxs_two_g2(g2, taus, g2b, tausb, res_pargs=None, one_plot=False, *ar # plt.show() -def save_gisaxs_g2(g2, res_pargs, time_label=False, taus=None, filename=None, *argv, **kwargs): +def save_gisaxs_g2( + g2, res_pargs, time_label=False, taus=None, filename=None, *argv, **kwargs +): """ Aug 8, 2016, Y.G.@CHX save g2 results, @@ -1843,14 +2010,23 @@ def save_gisaxs_g2(g2, 
res_pargs, time_label=False, taus=None, filename=None, *a if filename is None: if time_label: dt = datetime.now() - CurTime = "%s%02d%02d-%02d%02d-" % (dt.year, dt.month, dt.day, dt.hour, dt.minute) + CurTime = "%s%02d%02d-%02d%02d-" % ( + dt.year, + dt.month, + dt.day, + dt.hour, + dt.minute, + ) filename = os.path.join(path, "g2-%s-%s.csv" % (uid, CurTime)) else: filename = os.path.join(path, "uid=%s--g2.csv" % (uid)) else: filename = os.path.join(path, filename) df.to_csv(filename) - print("The correlation function of uid= %s is saved with filename as %s" % (uid, filename)) + print( + "The correlation function of uid= %s is saved with filename as %s" + % (uid, filename) + ) def stretched_auto_corr_scat_factor(x, beta, relaxation_rate, alpha=1.0, baseline=1): @@ -1861,7 +2037,9 @@ def simple_exponential(x, beta, relaxation_rate, baseline=1): return beta * np.exp(-2 * relaxation_rate * x) + baseline -def fit_gisaxs_g2(g2, res_pargs, function="simple_exponential", one_plot=False, *argv, **kwargs): +def fit_gisaxs_g2( + g2, res_pargs, function="simple_exponential", one_plot=False, *argv, **kwargs +): """ July 20,2016, Y.G.@CHX Fit one-time correlation function @@ -1886,12 +2064,12 @@ def fit_gisaxs_g2(g2, res_pargs, function="simple_exponential", one_plot=False, function: 'simple_exponential': fit by a simple exponential function, defined as beta * np.exp(-2 * relaxation_rate * lags) + baseline - 'streched_exponential': fit by a streched exponential function, defined as + 'stretched_exponential': fit by a stretched exponential function, defined as beta * (np.exp(-2 * relaxation_rate * lags))**alpha + baseline Returns ------- - fit resutls: + fit results: a dict, with keys as 'baseline': 'beta': @@ -1921,7 +2099,9 @@ def fit_gisaxs_g2(g2, res_pargs, function="simple_exponential", one_plot=False, if function == "simple_exponential" or function == "simple": _vars = np.unique(_vars + ["alpha"]) - mod = Model(stretched_auto_corr_scat_factor) # , independent_vars= list( 
_vars) ) + mod = Model( + stretched_auto_corr_scat_factor + ) # , independent_vars= list( _vars) ) elif function == "stretched_exponential" or function == "stretched": mod = Model(stretched_auto_corr_scat_factor) # , independent_vars= _vars) @@ -1966,9 +2146,16 @@ def fit_gisaxs_g2(g2, res_pargs, function="simple_exponential", one_plot=False, baseline_ = kwargs["guess_values"]["baseline"] else: baseline_ = 1.0 - pars = mod.make_params(beta=beta_, alpha=alpha_, relaxation_rate=relaxation_rate_, baseline=baseline_) + pars = mod.make_params( + beta=beta_, + alpha=alpha_, + relaxation_rate=relaxation_rate_, + baseline=baseline_, + ) else: - pars = mod.make_params(beta=0.05, alpha=1.0, relaxation_rate=0.005, baseline=1.0) + pars = mod.make_params( + beta=0.05, alpha=1.0, relaxation_rate=0.005, baseline=1.0 + ) for v in _vars: pars["%s" % v].vary = False @@ -2034,12 +2221,18 @@ def fit_gisaxs_g2(g2, res_pargs, function="simple_exponential", one_plot=False, ax.set_xlim(kwargs["xlim"]) txts = r"$\tau$" + r"$ = %.3f$" % (1 / rate[i]) + r"$ s$" - ax.text(x=0.02, y=0.55 + 0.3, s=txts, fontsize=14, transform=ax.transAxes) + ax.text( + x=0.02, y=0.55 + 0.3, s=txts, fontsize=14, transform=ax.transAxes + ) txts = r"$\alpha$" + r"$ = %.3f$" % (alpha[i]) # txts = r'$\beta$' + r'$ = %.3f$'%(beta[i]) + r'$ s^{-1}$' - ax.text(x=0.02, y=0.45 + 0.3, s=txts, fontsize=14, transform=ax.transAxes) + ax.text( + x=0.02, y=0.45 + 0.3, s=txts, fontsize=14, transform=ax.transAxes + ) txts = r"$baseline$" + r"$ = %.3f$" % (baseline[i]) - ax.text(x=0.02, y=0.35 + 0.3, s=txts, fontsize=14, transform=ax.transAxes) + ax.text( + x=0.02, y=0.35 + 0.3, s=txts, fontsize=14, transform=ax.transAxes + ) result = dict(beta=beta, rate=rate, alpha=alpha, baseline=baseline) fp = path + "uid=%s--g2-qz=%s--fit" % (uid, qz_center[qz_ind]) + ".png" @@ -2105,8 +2298,20 @@ def fit_gisaxs_g2(g2, res_pargs, function="simple_exponential", one_plot=False, # print( result1.best_values['relaxation_rate'], 
result1.best_values['beta'] ) - txts = r"$q_z$" + r"$_%s$" % qz_ind + r"$\tau$" + r"$ = %.3f$" % (1 / rate[i]) + r"$ s$" - ax.text(x=0.02, y=0.55 + 0.3 - 0.1 * qz_ind, s=txts, fontsize=14, transform=ax.transAxes) + txts = ( + r"$q_z$" + + r"$_%s$" % qz_ind + + r"$\tau$" + + r"$ = %.3f$" % (1 / rate[i]) + + r"$ s$" + ) + ax.text( + x=0.02, + y=0.55 + 0.3 - 0.1 * qz_ind, + s=txts, + fontsize=14, + transform=ax.transAxes, + ) if "ylim" in kwargs: ax.set_ylim(kwargs["ylim"]) @@ -2144,14 +2349,18 @@ def fit_gisaxs_g2(g2, res_pargs, function="simple_exponential", one_plot=False, ############################### -def get_each_box_mean_intensity(data_series, box_mask, sampling, timeperframe, plot_=True, *argv, **kwargs): +def get_each_box_mean_intensity( + data_series, box_mask, sampling, timeperframe, plot_=True, *argv, **kwargs +): """Dec 16, 2015, Y.G.@CHX get each box (ROI) mean intensity as a function of time """ - mean_int_sets, index_list = roi.mean_intensity(np.array(data_series[::sampling]), box_mask) + mean_int_sets, index_list = roi.mean_intensity( + np.array(data_series[::sampling]), box_mask + ) try: N = len(data_series) except: @@ -2166,7 +2375,13 @@ def get_each_box_mean_intensity(data_series, box_mask, sampling, timeperframe, p ax.set_title("uid= %s--Mean intensity of each box" % uid) for i in range(num_rings): - ax.plot(times[::sampling], mean_int_sets[:, i], label="Box " + str(i + 1), marker="o", ls="-") + ax.plot( + times[::sampling], + mean_int_sets[:, i], + label="Box " + str(i + 1), + marker="o", + ls="-", + ) ax.set_xlabel("Time") ax.set_ylabel("Mean Intensity") ax.legend() @@ -2250,14 +2465,22 @@ def fit_qr_qz_rate(qr, qz, rate, plot_=True, *argv, **kwargs): ax.plot(x**power, res[i].best_fit, "-r") txts = r"$D0: %.3e$" % D0[i] + r" $A^2$" + r"$s^{-1}$" dy = 0.1 - ax.text(x=0.15, y=0.65 - dy * i, s=txts, fontsize=14, transform=ax.transAxes) + ax.text( + x=0.15, y=0.65 - dy * i, s=txts, fontsize=14, transform=ax.transAxes + ) legend = 
ax.legend(loc="best") ax.set_ylabel("Relaxation rate " r"$\gamma$" "($s^{-1}$)") ax.set_xlabel("$q^%s$" r"($\AA^{-2}$)" % power) dt = datetime.now() - CurTime = "%s%02d%02d-%02d%02d-" % (dt.year, dt.month, dt.day, dt.hour, dt.minute) + CurTime = "%s%02d%02d-%02d%02d-" % ( + dt.year, + dt.month, + dt.day, + dt.hour, + dt.minute, + ) # fp = path + 'Q%s-Rate--uid=%s'%(power,uid) + CurTime + '--Fit.png' fp = path + "uid=%s--Q-Rate" % (uid) + "--fit-.png" fig.savefig(fp, dpi=fig.dpi) @@ -2503,7 +2726,9 @@ def multi_uids_gisaxs_xpcs_analysis( md["sample"] = "sample" dpix = md["x_pixel_size"] * 1000.0 # in mm, eiger 4m is 0.075 mm - lambda_ = md["incident_wavelength"] # wavelegth of the X-rays in Angstroms + lambda_ = md[ + "incident_wavelength" + ] # wavelegth of the X-rays in Angstroms Ldet = md["detector_distance"] # detector to sample distance (mm), currently, *1000 for saxs, *1 for gisaxs exposuretime = md["count_time"] @@ -2512,7 +2737,12 @@ def multi_uids_gisaxs_xpcs_analysis( # timeperframe = exposuretime#for visiblitly # timeperframe = 2 ## manual overwrite!!!! we apparently writing the wrong metadata.... 
setup_pargs = dict( - uid=uid, dpix=dpix, Ldet=Ldet, lambda_=lambda_, timeperframe=timeperframe, path=data_dir + uid=uid, + dpix=dpix, + Ldet=Ldet, + lambda_=lambda_, + timeperframe=timeperframe, + path=data_dir, ) md["avg_img"] = avg_imgr @@ -2527,17 +2757,34 @@ def multi_uids_gisaxs_xpcs_analysis( else: good_end_ = good_end FD = Multifile(filename, good_start, good_end_) - good_start = max(good_start, np.where(np.array(imgsum) > min_inten)[0][0]) - print("With compression, the good_start frame number is: %s " % good_start) + good_start = max( + good_start, np.where(np.array(imgsum) > min_inten)[0][0] + ) + print( + "With compression, the good_start frame number is: %s " + % good_start + ) print("The good_end frame number is: %s " % good_end_) if not para_run: g2, lag_steps_ = cal_g2c( - FD, box_maskr, bad_frame_list, good_start, num_buf=8, imgsum=None, norm=None + FD, + box_maskr, + bad_frame_list, + good_start, + num_buf=8, + imgsum=None, + norm=None, ) else: g2, lag_steps_ = cal_g2p( - FD, box_maskr, bad_frame_list, good_start, num_buf=8, imgsum=None, norm=None + FD, + box_maskr, + bad_frame_list, + good_start, + num_buf=8, + imgsum=None, + norm=None, ) if len(lag_steps) < len(lag_steps_): @@ -2550,7 +2797,11 @@ def multi_uids_gisaxs_xpcs_analysis( good_start = 0 good_series = apply_mask(imgsar[good_start:], maskr) imgsum, bad_frame_list = get_each_frame_intensity( - good_series, sampling=sampling, bad_pixel_threshold=1.2e8, plot_=False, uid=uid + good_series, + sampling=sampling, + bad_pixel_threshold=1.2e8, + plot_=False, + uid=uid, ) bad_image_process = False @@ -2559,14 +2810,25 @@ def multi_uids_gisaxs_xpcs_analysis( print(bad_image_process) g2, lag_steps_ = cal_g2( - good_series, box_maskr, bad_image_process, bad_frame_list, good_start, num_buf=8 + good_series, + box_maskr, + bad_image_process, + bad_frame_list, + good_start, + num_buf=8, ) if len(lag_steps) < len(lag_steps_): lag_steps = lag_step_ taus_ = lag_steps_ * timeperframe taus = lag_steps * 
timeperframe - res_pargs = dict(taus=taus_, qz_center=qz_center, qr_center=qr_center, path=data_dir_, uid=uid) + res_pargs = dict( + taus=taus_, + qz_center=qz_center, + qr_center=qr_center, + path=data_dir_, + uid=uid, + ) save_gisaxs_g2(g2, res_pargs) # plot_gisaxs_g2( g2, taus, vlim=[0.95, 1.1], res_pargs=res_pargs, one_plot=True) @@ -2576,14 +2838,33 @@ def multi_uids_gisaxs_xpcs_analysis( res_pargs, function="stretched", vlim=[0.95, 1.1], - fit_variables={"baseline": True, "beta": True, "alpha": False, "relaxation_rate": True}, - guess_values={"baseline": 1.229, "beta": 0.05, "alpha": 1.0, "relaxation_rate": 0.01}, + fit_variables={ + "baseline": True, + "beta": True, + "alpha": False, + "relaxation_rate": True, + }, + guess_values={ + "baseline": 1.229, + "beta": 0.05, + "alpha": 1.0, + "relaxation_rate": 0.01, + }, one_plot=True, ) - fit_qr_qz_rate(qr_center, qz_center, fit_result, power_variable=False, uid=uid, path=data_dir_) + fit_qr_qz_rate( + qr_center, + qz_center, + fit_result, + power_variable=False, + uid=uid, + path=data_dir_, + ) - psave_obj(md, data_dir_ + "uid=%s-md" % uid) # save the setup parameters + psave_obj( + md, data_dir_ + "uid=%s-md" % uid + ) # save the setup parameters g2s[run_seq + 1][i] = g2 diff --git a/pyCHX/v2/_futurepyCHX/XPCS_SAXS.py b/pyCHX/v2/_futurepyCHX/XPCS_SAXS.py index c59f6cc..2f36af1 100644 --- a/pyCHX/v2/_futurepyCHX/XPCS_SAXS.py +++ b/pyCHX/v2/_futurepyCHX/XPCS_SAXS.py @@ -7,21 +7,16 @@ import os from pandas import DataFrame -from scipy.special import erf from pyCHX.chx_compress_analysis import ( Multifile, compress_eigerdata, get_avg_imgc, - get_each_ring_mean_intensityc, - init_compress_eigerdata, - mean_intensityc, - read_compressed_eigerdata, ) -from pyCHX.chx_correlationc import Get_Pixel_Arrayc, auto_two_Arrayc, cal_g2c, get_pixelist_interp_iq +from pyCHX.chx_correlationc import cal_g2c, get_pixelist_interp_iq from pyCHX.chx_correlationp import cal_g2p from pyCHX.chx_generic_functions import * -from 
pyCHX.chx_libs import RUN_GUI, Figure, colors, colors_, colors_copy, markers, markers_, markers_copy +from pyCHX.chx_libs import RUN_GUI, Figure, colors, markers def get_iq_invariant(qt, iqst): @@ -111,9 +106,9 @@ def recover_img_from_iq(qp, iq, center, mask): return img_ -def get_cirucular_average_std(img, mask, setup_pargs, img_name="xx"): +def get_circular_average_std(img, mask, setup_pargs, img_name="xx"): """YG. develop at CHX, 2017 July 18, - Get the standard devation of tge circular average of img + Get the standard deviation of the circular average of img image-->I(q)-->image_mean--> (image- image_mean)**2 --> I(q) --> std = sqrt(I(q)) """ qp, iq, q = get_circular_average(img, mask, pargs=setup_pargs, save=False) @@ -132,7 +127,15 @@ def get_delta_img(img, mask, setup_pargs, img_name="xx", plot=False): img_ = recover_img_from_iq(qp, iq, center, mask) delta = img - img_ * img.mean() / img_.mean() if plot: - show_img(delta, logs=True, aspect=1, cmap=cmap_albula, vmin=1e-5, vmax=10**1, image_name=img_name) + show_img( + delta, + logs=True, + aspect=1, + cmap=cmap_albula, + vmin=1e-5, + vmax=10**1, + image_name=img_name, + ) return delta @@ -167,7 +170,9 @@ def combine_ring_anglar_mask(ring_mask, ang_mask): return np.int_(ring_ang_) -def get_seg_from_ring_mask(inner_angle, outer_angle, num_angles, width_angle, center, ring_mask, qr_center): +def get_seg_from_ring_mask( + inner_angle, outer_angle, num_angles, width_angle, center, ring_mask, qr_center +): """YG. Jan 6, 2017 A simple wrap function to get angle cut mask from ring_mask Parameter: @@ -194,7 +199,9 @@ def get_seg_from_ring_mask(inner_angle, outer_angle, num_angles, width_angle, ce return seg_mask, qval_dict -def get_seg_dict_from_ring_mask(inner_angle, outer_angle, num_angles, width_angle, center, ring_mask, qr_center): +def get_seg_dict_from_ring_mask( + inner_angle, outer_angle, num_angles, width_angle, center, ring_mask, qr_center +): """YG. 
Jan 6, 2017 A simple wrap function to get angle cut mask from ring_mask Parameter: @@ -247,7 +254,11 @@ def combine_two_roi_mask(ring_mask, ang_mask, pixel_num_thres=10): for i, ind in enumerate(ruiq[1:]): ring_mask_.ravel()[np.where(rf == ind)[0]] = maxa * i - new_mask = (ring_mask_ + ang_mask) * np.array(ring_mask, dtype=bool) * np.array(ang_mask, dtype=bool) + new_mask = ( + (ring_mask_ + ang_mask) + * np.array(ring_mask, dtype=bool) + * np.array(ang_mask, dtype=bool) + ) qind, pixelist = roi.extract_label_indices(new_mask) noqs = len(np.unique(qind)) @@ -333,7 +344,14 @@ def bin_1D(x, y, nx=None, min_x=None, max_x=None): def circular_average( - image, calibrated_center, threshold=0, nx=None, pixel_size=(1, 1), min_x=None, max_x=None, mask=None + image, + calibrated_center, + threshold=0, + nx=None, + pixel_size=(1, 1), + min_x=None, + max_x=None, + mask=None, ): """Circular average of the the image data The circular average is also known as the radial integration @@ -417,7 +435,7 @@ def get_circular_average( plot_=False, save=False, *argv, - **kwargs + **kwargs, ): """get a circular average of an image Parameters @@ -435,8 +453,8 @@ def get_circular_average( number of bins in x defaults is 1500 bins - plot_: a boolen type, if True, plot the one-D curve - plot_qinpixel:a boolen type, if True, the x-axis of the one-D curve is q in pixel; else in real Q + plot_: a boolean type, if True, plot the one-D curve + plot_qinpixel:a boolean type, if True, the x-axis of the one-D curve is q in pixel; else in real Q Returns ------- @@ -447,10 +465,22 @@ def get_circular_average( """ - center, Ldet, lambda_, dpix = pargs["center"], pargs["Ldet"], pargs["lambda_"], pargs["dpix"] + center, Ldet, lambda_, dpix = ( + pargs["center"], + pargs["Ldet"], + pargs["lambda_"], + pargs["dpix"], + ) uid = pargs["uid"] qp, iq = circular_average( - avg_img, center, threshold=0, nx=nx, pixel_size=(dpix, dpix), mask=mask, min_x=min_x, max_x=max_x + avg_img, + center, + threshold=0, + nx=nx, 
+ pixel_size=(dpix, dpix), + mask=mask, + min_x=min_x, + max_x=max_x, ) qp_ = qp * dpix # convert bin_centers from r [um] to two_theta and then to q [1/px] (reciprocal space) @@ -492,12 +522,23 @@ def get_circular_average( fig.savefig(fp, dpi=fig.dpi) if save: path = pargs["path"] - save_lists([q, iq], label=["q_A-1", "Iq"], filename="%s_q_Iq.csv" % uid, path=path) + save_lists( + [q, iq], label=["q_A-1", "Iq"], filename="%s_q_Iq.csv" % uid, path=path + ) return qp, iq, q def plot_circular_average( - qp, iq, q, pargs, show_pixel=False, loglog=False, save=True, return_fig=False, *argv, **kwargs + qp, + iq, + q, + pargs, + show_pixel=False, + loglog=False, + save=True, + return_fig=False, + *argv, + **kwargs, ): if RUN_GUI: fig = Figure() @@ -546,7 +587,18 @@ def plot_circular_average( return fig -def get_angular_average(avg_img, mask, pargs, min_r, max_r, nx=3600, plot_=False, save=False, *argv, **kwargs): +def get_angular_average( + avg_img, + mask, + pargs, + min_r, + max_r, + nx=3600, + plot_=False, + save=False, + *argv, + **kwargs, +): """get a angular average of an image Parameters ---------- @@ -563,8 +615,8 @@ def get_angular_average(avg_img, mask, pargs, min_r, max_r, nx=3600, plot_=False number of bins in x defaults is 1500 bins - plot_: a boolen type, if True, plot the one-D curve - plot_qinpixel:a boolen type, if True, the x-axis of the one-D curve is q in pixel; else in real Q + plot_: a boolean type, if True, plot the one-D curve + plot_qinpixel:a boolean type, if True, the x-axis of the one-D curve is q in pixel; else in real Q Returns ------- @@ -575,11 +627,22 @@ def get_angular_average(avg_img, mask, pargs, min_r, max_r, nx=3600, plot_=False """ - center, Ldet, lambda_, dpix = pargs["center"], pargs["Ldet"], pargs["lambda_"], pargs["dpix"] + center, Ldet, lambda_, dpix = ( + pargs["center"], + pargs["Ldet"], + pargs["lambda_"], + pargs["dpix"], + ) uid = pargs["uid"] angq, ang = angular_average( - avg_img, calibrated_center=center, 
pixel_size=(dpix, dpix), nx=nx, min_r=min_r, max_r=max_r, mask=mask + avg_img, + calibrated_center=center, + pixel_size=(dpix, dpix), + nx=nx, + min_r=min_r, + max_r=max_r, + mask=mask, ) if plot_: @@ -664,7 +727,8 @@ def angular_average( min_r = 0 if max_r is None: max_r = np.sqrt( - (image.shape[0] - calibrated_center[0]) ** 2 + (image.shape[1] - calibrated_center[1]) ** 2 + (image.shape[0] - calibrated_center[0]) ** 2 + + (image.shape[1] - calibrated_center[1]) ** 2 ) r_mask = make_ring_mask(calibrated_center, image.shape, min_r, max_r) @@ -679,7 +743,9 @@ def angular_average( bina = np.ravel(angle_val) image_mask = np.ravel(image * r_mask) - bin_edges, sums, counts = utils.bin_1D(bina, image_mask, nx, min_x=min_x, max_x=max_x) + bin_edges, sums, counts = utils.bin_1D( + bina, image_mask, nx, min_x=min_x, max_x=max_x + ) # print (counts) th_mask = counts > threshold @@ -690,7 +756,18 @@ def angular_average( return bin_centers * 180 / np.pi, ang_averages -def get_t_iqc(FD, frame_edge, mask, pargs, nx=1500, plot_=False, save=False, show_progress=True, *argv, **kwargs): +def get_t_iqc( + FD, + frame_edge, + mask, + pargs, + nx=1500, + plot_=False, + save=False, + show_progress=True, + *argv, + **kwargs, +): """Get t-dependent Iq Parameters @@ -702,7 +779,7 @@ def get_t_iqc(FD, frame_edge, mask, pargs, nx=1500, plot_=False, save=False, sho nx : int, optional number of bins in x defaults is 1500 bins - plot_: a boolen type, if True, plot the time~one-D curve with qp as x-axis + plot_: a boolean type, if True, plot the time~one-D curve with qp as x-axis Returns --------- qp: q in pixel @@ -716,7 +793,9 @@ def get_t_iqc(FD, frame_edge, mask, pargs, nx=1500, plot_=False, save=False, sho for i in range(Nt): t1, t2 = frame_edge[i] # print (t1,t2) - avg_img = get_avg_imgc(FD, beg=t1, end=t2, sampling=1, plot_=False, show_progress=show_progress) + avg_img = get_avg_imgc( + FD, beg=t1, end=t2, sampling=1, plot_=False, show_progress=show_progress + ) qp, iqs[i], q = 
get_circular_average(avg_img, mask, pargs, nx=nx, plot_=False) if plot_: @@ -761,7 +840,17 @@ def get_t_iqc(FD, frame_edge, mask, pargs, nx=1500, plot_=False, save=False, sho return qp, np.array(iqs), q -def plot_t_iqc(q, iqs, frame_edge, pargs, save=True, return_fig=False, legend_size=None, *argv, **kwargs): +def plot_t_iqc( + q, + iqs, + frame_edge, + pargs, + save=True, + return_fig=False, + legend_size=None, + *argv, + **kwargs, +): """Plot t-dependent Iq Parameters @@ -839,7 +928,17 @@ def calc_q(L, a, wv): return q -def get_t_iq(data_series, frame_edge, mask, pargs, nx=1500, plot_=False, save=False, *argv, **kwargs): +def get_t_iq( + data_series, + frame_edge, + mask, + pargs, + nx=1500, + plot_=False, + save=False, + *argv, + **kwargs, +): """Get t-dependent Iq Parameters @@ -851,7 +950,7 @@ def get_t_iq(data_series, frame_edge, mask, pargs, nx=1500, plot_=False, save=Fa nx : int, optional number of bins in x defaults is 1500 bins - plot_: a boolen type, if True, plot the time~one-D curve with qp as x-axis + plot_: a boolean type, if True, plot the time~one-D curve with qp as x-axis Returns --------- @@ -915,7 +1014,7 @@ def get_t_ang( plot_=False, save=False, *argv, - **kwargs + **kwargs, ): """Get t-dependent angule intensity @@ -940,7 +1039,7 @@ def get_t_ang( nx : int, optional number of bins in x defaults is 1500 bins - plot_: a boolen type, if True, plot the time~one-D curve with qp as x-axis + plot_: a boolean type, if True, plot the time~one-D curve with qp as x-axis Returns --------- @@ -957,7 +1056,13 @@ def get_t_ang( # print (t1,t2) avg_img = get_avg_img(data_series[t1:t2], sampling=1, plot_=False) qp, iqs[i] = angular_average( - avg_img, center, pixel_size=pixel_size, nx=nx, min_r=min_r, max_r=max_r, mask=mask + avg_img, + center, + pixel_size=pixel_size, + nx=nx, + min_r=min_r, + max_r=max_r, + mask=mask, ) if plot_: @@ -1047,7 +1152,7 @@ def _make_roi(coords, edges, shape): def angulars(edges, center, shape): """ - Draw annual (angluar-shaped) 
shaped regions of interest. + Draw annual (angular-shaped) shaped regions of interest. Each ring will be labeled with an integer. Regions outside any ring will be filled with zeros. Parameters @@ -1071,7 +1176,8 @@ def angulars(edges, center, shape): edges = np.atleast_2d(np.asarray(edges)).ravel() if not 0 == len(edges) % 2: raise ValueError( - "edges should have an even number of elements, " "giving inner, outer radii for each angular" + "edges should have an even number of elements, " + "giving inner, outer radii for each angular" ) if not np.all(np.diff(edges) > 0): raise ValueError( @@ -1104,9 +1210,9 @@ def update_angular_mask_width_edge(edge, mask, center, roi_mask): return roi_mask -def fix_angle_mask_at_PN_180(edge, mask, center, roi_mask): +def fix_angle_mask_at_ON_180(edge, mask, center, roi_mask): """YG Dev@CHX May, 2019 - to fix the problem of making angluar mask at the angle edge around +/- 180 + to fix the problem of making angular mask at the angle edge around +/- 180 Input: edge: the edge of the anglues mask: the mask of the image @@ -1192,7 +1298,9 @@ def get_angular_mask( if edges is None: if num_angles != 1: - spacing = (outer_angle - inner_angle - num_angles * width) / (num_angles - 1) # spacing between rings + spacing = (outer_angle - inner_angle - num_angles * width) / ( + num_angles - 1 + ) # spacing between rings else: spacing = 0 edges = roi.ring_edges(inner_angle, width, spacing, num_angles) @@ -1206,18 +1314,18 @@ def get_angular_mask( edges2 = edges - 180 for edge_ in [edges2]: ang_mask = update_angular_mask_width_edge(edge_, mask, center, ang_mask) - ang_mask = fix_angle_mask_at_PN_180(edge_, mask, center, ang_mask) + ang_mask = fix_angle_mask_at_ON_180(edge_, mask, center, ang_mask) if flow_angle is not None: edges3 = 2 * flow_angle - edges[:, ::-1] edges4 = 2 * flow_angle - edges[:, ::-1] - 180 for edge_ in [edges3, edges4]: ang_mask = update_angular_mask_width_edge(edge_, mask, center, ang_mask) - ang_mask = 
fix_angle_mask_at_PN_180(edge_, mask, center, ang_mask) + ang_mask = fix_angle_mask_at_ON_180(edge_, mask, center, ang_mask) else: # for i, edge_ in enumerate( edges ): # print(edge_) if fix_180_angle: - ang_mask = fix_angle_mask_at_PN_180(edges, mask, center, ang_mask) + ang_mask = fix_angle_mask_at_ON_180(edges, mask, center, ang_mask) labels, indices = roi.extract_label_indices(ang_mask) nopr = np.bincount(np.array(labels, dtype=int))[1:] if len(np.where(nopr == 0)[0] != 0): @@ -1266,7 +1374,9 @@ def get_angular_mask_old( if edges is None: if num_angles != 1: - spacing = (outer_angle - inner_angle - num_angles * width) / (num_angles - 1) # spacing between rings + spacing = (outer_angle - inner_angle - num_angles * width) / ( + num_angles - 1 + ) # spacing between rings else: spacing = 0 edges = roi.ring_edges(inner_angle, width, spacing, num_angles) @@ -1360,7 +1470,12 @@ def get_ring_mask( """ - center, Ldet, lambda_, dpix = pargs["center"], pargs["Ldet"], pargs["lambda_"], pargs["dpix"] + center, Ldet, lambda_, dpix = ( + pargs["center"], + pargs["Ldet"], + pargs["lambda_"], + pargs["dpix"], + ) # spacing = (outer_radius - inner_radius)/(num_rings-1) - 2 # spacing between rings # qc = np.int_( np.linspace( inner_radius,outer_radius, num_rings ) ) @@ -1373,7 +1488,9 @@ def get_ring_mask( # find the edges of the required rings if edges is None: if num_rings != 1: - spacing = (outer_radius - inner_radius - num_rings * width) / (num_rings - 1) # spacing between rings + spacing = (outer_radius - inner_radius - num_rings * width) / ( + num_rings - 1 + ) # spacing between rings else: spacing = 0 edges = roi.ring_edges(inner_radius, width, spacing, num_rings) @@ -1524,7 +1641,15 @@ def show_ring_ang_roi(data, rois, alpha=0.3, save=False, *argv, **kwargs): def plot_qIq_with_ROI( - q, iq, q_ring_center, q_ring_edge=None, logs=True, save=False, return_fig=False, *argv, **kwargs + q, + iq, + q_ring_center, + q_ring_edge=None, + logs=True, + save=False, + return_fig=False, 
+ *argv, + **kwargs, ): """Aug 6, 2016, Y.G.@CHX Update@2019, March to make a span plot with q_ring_edge @@ -1578,12 +1703,21 @@ def plot_qIq_with_ROI( def get_each_ring_mean_intensity( - data_series, ring_mask, sampling, timeperframe, plot_=True, save=False, *argv, **kwargs + data_series, + ring_mask, + sampling, + timeperframe, + plot_=True, + save=False, + *argv, + **kwargs, ): """ get time dependent mean intensity of each ring """ - mean_int_sets, index_list = roi.mean_intensity(np.array(data_series[::sampling]), ring_mask) + mean_int_sets, index_list = roi.mean_intensity( + np.array(data_series[::sampling]), ring_mask + ) times = np.arange(len(data_series)) * timeperframe # get the time for each frame num_rings = len(np.unique(ring_mask)[1:]) @@ -1614,7 +1748,9 @@ def get_each_ring_mean_intensity( # plot g2 results -def plot_saxs_rad_ang_g2(g2, taus, res_pargs=None, master_angle_plot=False, return_fig=False, *argv, **kwargs): +def plot_saxs_rad_ang_g2( + g2, taus, res_pargs=None, master_angle_plot=False, return_fig=False, *argv, **kwargs +): """plot g2 results of segments with radius and angle partation , g2: one-time correlation function @@ -1736,7 +1872,13 @@ def plot_saxs_rad_ang_g2(g2, taus, res_pargs=None, master_angle_plot=False, retu def fit_saxs_rad_ang_g2( - g2, res_pargs=None, function="simple_exponential", fit_range=None, master_angle_plot=False, *argv, **kwargs + g2, + res_pargs=None, + function="simple_exponential", + fit_range=None, + master_angle_plot=False, + *argv, + **kwargs, ): """ Fit one-time correlation function @@ -1753,7 +1895,7 @@ def fit_saxs_rad_ang_g2( function: 'simple_exponential': fit by a simple exponential function, defined as beta * np.exp(-2 * relaxation_rate * lags) + baseline - 'streched_exponential': fit by a streched exponential function, defined as + 'stretched_exponential': fit by a stretched exponential function, defined as beta * (np.exp(-2 * relaxation_rate * lags))**alpha + baseline #fit_vibration: @@ -1761,7 
+1903,7 @@ def fit_saxs_rad_ang_g2( Returns ------- - fit resutls: + fit results: a dict, with keys as 'baseline': 'beta': @@ -1825,13 +1967,17 @@ def fit_saxs_rad_ang_g2( if function == "simple_exponential" or function == "simple": _vars = np.unique(_vars + ["alpha"]) - mod = Model(stretched_auto_corr_scat_factor) # , independent_vars= list( _vars) ) + mod = Model( + stretched_auto_corr_scat_factor + ) # , independent_vars= list( _vars) ) elif function == "stretched_exponential" or function == "stretched": mod = Model(stretched_auto_corr_scat_factor) # , independent_vars= _vars) elif function == "stretched_vibration": - mod = Model(stretched_auto_corr_scat_factor_with_vibration) # , independent_vars= _vars) + mod = Model( + stretched_auto_corr_scat_factor_with_vibration + ) # , independent_vars= _vars) elif function == "flow_para_function" or function == "flow_para": mod = Model(flow_para_function) # , independent_vars= _vars) @@ -1856,7 +2002,9 @@ def fit_saxs_rad_ang_g2( _alpha = _guess_val["alpha"] _relaxation_rate = _guess_val["relaxation_rate"] _baseline = _guess_val["baseline"] - pars = mod.make_params(beta=_beta, alpha=_alpha, relaxation_rate=_relaxation_rate, baseline=_baseline) + pars = mod.make_params( + beta=_beta, alpha=_alpha, relaxation_rate=_relaxation_rate, baseline=_baseline + ) if function == "flow_para_function" or function == "flow_para": _flow_velocity = _guess_val["flow_velocity"] @@ -1872,7 +2020,12 @@ def fit_saxs_rad_ang_g2( _freq = _guess_val["freq"] _amp = _guess_val["amp"] pars = mod.make_params( - beta=_beta, alpha=_alpha, freq=_freq, amp=_amp, relaxation_rate=_relaxation_rate, baseline=_baseline + beta=_beta, + alpha=_alpha, + freq=_freq, + amp=_amp, + relaxation_rate=_relaxation_rate, + baseline=_baseline, ) for v in _vars: @@ -1967,7 +2120,9 @@ def fit_saxs_rad_ang_g2( if function == "flow_para_function" or function == "flow_para": txts = r"$flow_v$" + r"$ = %.3f$" % (flow[i]) - ax.text(x=x, y=y0 - 0.3, s=txts, fontsize=fontsize, 
transform=ax.transAxes) + ax.text( + x=x, y=y0 - 0.3, s=txts, fontsize=fontsize, transform=ax.transAxes + ) if "ylim" in kwargs: ax.set_ylim(kwargs["ylim"]) @@ -1986,7 +2141,9 @@ def fit_saxs_rad_ang_g2( result = dict(beta=beta, rate=rate, alpha=alpha, baseline=baseline) if function == "flow_para_function" or function == "flow_para": - result = dict(beta=beta, rate=rate, alpha=alpha, baseline=baseline, flow_velocity=flow) + result = dict( + beta=beta, rate=rate, alpha=alpha, baseline=baseline, flow_velocity=flow + ) if function == "stretched_vibration": result = dict(beta=beta, rate=rate, alpha=alpha, baseline=baseline, freq=freq) @@ -2022,7 +2179,13 @@ def save_seg_saxs_g2(g2, res_pargs, time_label=True, *argv, **kwargs): if time_label: dt = datetime.now() - CurTime = "%s%02d%02d-%02d%02d-" % (dt.year, dt.month, dt.day, dt.hour, dt.minute) + CurTime = "%s%02d%02d-%02d%02d-" % ( + dt.year, + dt.month, + dt.day, + dt.hour, + dt.minute, + ) filename = os.path.join(path, "g2-%s-%s.csv" % (uid, CurTime)) else: filename = os.path.join(path, "uid=%s--g2.csv" % (uid)) @@ -2171,8 +2334,12 @@ def multi_uids_saxs_flow_xpcs_analysis( md["sample"] = "sample" dpix = md["x_pixel_size"] * 1000.0 # in mm, eiger 4m is 0.075 mm - lambda_ = md["incident_wavelength"] # wavelegth of the X-rays in Angstroms - Ldet = md["detector_distance"] * 1000 # detector to sample distance (mm) + lambda_ = md[ + "incident_wavelength" + ] # wavelegth of the X-rays in Angstroms + Ldet = ( + md["detector_distance"] * 1000 + ) # detector to sample distance (mm) exposuretime = md["count_time"] acquisition_period = md["frame_time"] timeperframe = acquisition_period # for g2 @@ -2204,8 +2371,12 @@ def multi_uids_saxs_flow_xpcs_analysis( good_end_ = good_end FD = Multifile(filename, good_start, good_end_) - good_start = max(good_start, np.where(np.array(imgsum) > min_inten)[0][0]) - print("With compression, the good_start frame number is: %s " % good_start) + good_start = max( + good_start, 
np.where(np.array(imgsum) > min_inten)[0][0] + ) + print( + "With compression, the good_start frame number is: %s " % good_start + ) print("The good_end frame number is: %s " % good_end_) norm = None @@ -2231,7 +2402,13 @@ def multi_uids_saxs_flow_xpcs_analysis( ) else: g2, lag_stepsv = cal_g2p( - FD, seg_mask, bad_frame_list, good_start, num_buf=8, imgsum=None, norm=norm + FD, + seg_mask, + bad_frame_list, + good_start, + num_buf=8, + imgsum=None, + norm=norm, ) if len(lag_steps) < len(lag_stepsv): @@ -2244,7 +2421,14 @@ def multi_uids_saxs_flow_xpcs_analysis( path=data_dir_, uid=uid + "_1a_mq%s" % conf, ) - save_g2(g2, taus=taus, qr=rcen, qz=acen, uid=uid + "_1a_mq%s" % conf, path=data_dir_) + save_g2( + g2, + taus=taus, + qr=rcen, + qz=acen, + uid=uid + "_1a_mq%s" % conf, + path=data_dir_, + ) if nconf == 0: g2s[run_seq + 1][i]["v"] = g2 # perpendular @@ -2353,7 +2537,9 @@ def multi_uids_saxs_flow_xpcs_analysis( ) dfv = save_g2_fit_para_tocsv( - g2_fit_result, filename=uid + "_1a_mq" + conf + "_fit_para", path=data_dir_ + g2_fit_result, + filename=uid + "_1a_mq" + conf + "_fit_para", + path=data_dir_, ) fit_q_rate( @@ -2365,7 +2551,9 @@ def multi_uids_saxs_flow_xpcs_analysis( ) # psave_obj( fit_result, data_dir_ + 'uid=%s-g2-fit-para'%uid ) - psave_obj(md, data_dir_ + "uid=%s-md" % uid) # save the setup parameters + psave_obj( + md, data_dir_ + "uid=%s-md" % uid + ) # save the setup parameters FD = 0 avg_img, imgsum, bad_frame_list = [0, 0, 0] @@ -2490,8 +2678,12 @@ def multi_uids_saxs_xpcs_analysis( md["sample"] = "sample" dpix = md["x_pixel_size"] * 1000.0 # in mm, eiger 4m is 0.075 mm - lambda_ = md["incident_wavelength"] # wavelegth of the X-rays in Angstroms - Ldet = md["detector_distance"] * 1000 # detector to sample distance (mm) + lambda_ = md[ + "incident_wavelength" + ] # wavelegth of the X-rays in Angstroms + Ldet = ( + md["detector_distance"] * 1000 + ) # detector to sample distance (mm) exposuretime = md["count_time"] acquisition_period = 
md["frame_time"] timeperframe = acquisition_period # for g2 @@ -2523,8 +2715,13 @@ def multi_uids_saxs_xpcs_analysis( good_end_ = good_end FD = Multifile(filename, good_start, good_end_) - good_start = max(good_start, np.where(np.array(imgsum) > min_inten)[0][0]) - print("With compression, the good_start frame number is: %s " % good_start) + good_start = max( + good_start, np.where(np.array(imgsum) > min_inten)[0][0] + ) + print( + "With compression, the good_start frame number is: %s " + % good_start + ) print("The good_end frame number is: %s " % good_end_) hmask = create_hot_pixel_mask(avg_img, 1e8) @@ -2542,11 +2739,23 @@ def multi_uids_saxs_xpcs_analysis( norm = get_pixelist_interp_iq(qp, iq, ring_mask, center) if not para_run: g2, lag_steps_ = cal_g2c( - FD, ring_mask, bad_frame_list, good_start, num_buf=8, imgsum=None, norm=norm + FD, + ring_mask, + bad_frame_list, + good_start, + num_buf=8, + imgsum=None, + norm=norm, ) else: g2, lag_steps_ = cal_g2p( - FD, ring_mask, bad_frame_list, good_start, num_buf=8, imgsum=None, norm=norm + FD, + ring_mask, + bad_frame_list, + good_start, + num_buf=8, + imgsum=None, + norm=norm, ) if len(lag_steps) < len(lag_steps_): @@ -2566,7 +2775,11 @@ def multi_uids_saxs_xpcs_analysis( good_series = apply_mask(imgsa[good_start:], mask) imgsum, bad_frame_list = get_each_frame_intensity( - good_series, sampling=sampling, bad_pixel_threshold=1.2e8, plot_=False, uid=uid + good_series, + sampling=sampling, + bad_pixel_threshold=1.2e8, + plot_=False, + uid=uid, ) bad_image_process = False @@ -2575,7 +2788,12 @@ def multi_uids_saxs_xpcs_analysis( print(bad_image_process) g2, lag_steps_ = cal_g2( - good_series, ring_mask, bad_image_process, bad_frame_list, good_start, num_buf=8 + good_series, + ring_mask, + bad_image_process, + bad_frame_list, + good_start, + num_buf=8, ) if len(lag_steps) < len(lag_steps_): lag_steps = lag_step_ @@ -2583,7 +2801,9 @@ def multi_uids_saxs_xpcs_analysis( taus_ = lag_steps_ * timeperframe taus = lag_steps 
* timeperframe - res_pargs = dict(taus=taus_, q_ring_center=q_ring_center, path=data_dir_, uid=uid) + res_pargs = dict( + taus=taus_, q_ring_center=q_ring_center, path=data_dir_, uid=uid + ) save_saxs_g2(g2, res_pargs) # plot_saxs_g2( g2, taus, vlim=[0.95, 1.05], res_pargs=res_pargs) if fit: @@ -2592,15 +2812,31 @@ def multi_uids_saxs_xpcs_analysis( res_pargs, function="stretched", vlim=[0.95, 1.05], - fit_variables={"baseline": True, "beta": True, "alpha": False, "relaxation_rate": True}, - guess_values={"baseline": 1.0, "beta": 0.05, "alpha": 1.0, "relaxation_rate": 0.01}, + fit_variables={ + "baseline": True, + "beta": True, + "alpha": False, + "relaxation_rate": True, + }, + guess_values={ + "baseline": 1.0, + "beta": 0.05, + "alpha": 1.0, + "relaxation_rate": 0.01, + }, ) fit_q_rate( - q_ring_center[:], fit_result["rate"][:], power_variable=False, uid=uid, path=data_dir_ + q_ring_center[:], + fit_result["rate"][:], + power_variable=False, + uid=uid, + path=data_dir_, ) psave_obj(fit_result, data_dir_ + "uid=%s-g2-fit-para" % uid) - psave_obj(md, data_dir_ + "uid=%s-md" % uid) # save the setup parameters + psave_obj( + md, data_dir_ + "uid=%s-md" % uid + ) # save the setup parameters g2s[run_seq + 1][i] = g2 print("*" * 40) @@ -2612,8 +2848,8 @@ def multi_uids_saxs_xpcs_analysis( def plot_mul_g2(g2s, md): """ Plot multi g2 functions generated by multi_uids_saxs_xpcs_analysis - Will create a large plot with q_number pannels - Each pannel (for each q) will show a number (run number of g2 functions + Will create a large plot with q_number panels + Each panel (for each q) will show a number (run number of g2 functions """ q_ring_center = md["q_ring_center"] @@ -2661,7 +2897,12 @@ def plot_mul_g2(g2s, md): # markersize=6, label = '%s'%sid) ax.semilogx( - taus[1:len_], y[1:len_], marker=markers[i], color=colors[i], markersize=6, label="%s" % sid + taus[1:len_], + y[1:len_], + marker=markers[i], + color=colors[i], + markersize=6, + label="%s" % sid, ) if sn == 0: @@ 
-2676,13 +2917,15 @@ def get_QrQw_From_RoiMask(roi_mask, setup_pargs): Input: roi_mask: int-type array, 2D roi mask, with q-index starting from 1 setup_pargs: dict, at least with keys as - dpix (det pixel size),lamdba_( wavelength), center( beam center) + dpix (det pixel size),lambda_( wavelength), center( beam center) Output: qr_cen: the q center of each ring qr_wid: the q width of each ring """ - qp_roi, iq_roi, q_roi = get_circular_average(roi_mask, np.array(roi_mask, dtype=bool), pargs=setup_pargs) + qp_roi, iq_roi, q_roi = get_circular_average( + roi_mask, np.array(roi_mask, dtype=bool), pargs=setup_pargs + ) Nmax = roi_mask.max() qr_cen = np.zeros(Nmax) qr_wid = np.zeros(Nmax) diff --git a/pyCHX/v2/_futurepyCHX/XPCS_XSVS_SAXS_Multi_2017_V4.py b/pyCHX/v2/_futurepyCHX/XPCS_XSVS_SAXS_Multi_2017_V4.py index 98907ef..ecdafc0 100644 --- a/pyCHX/v2/_futurepyCHX/XPCS_XSVS_SAXS_Multi_2017_V4.py +++ b/pyCHX/v2/_futurepyCHX/XPCS_XSVS_SAXS_Multi_2017_V4.py @@ -2,7 +2,6 @@ from pyCHX.chx_packages import * -from pyCHX.chx_xpcs_xsvs_jupyter import run_xpcs_xsvs_single def XPCS_XSVS_SAXS_Multi( @@ -50,7 +49,9 @@ def XPCS_XSVS_SAXS_Multi( mask_load = mask.copy() username = getpass.getuser() - data_dir0 = os.path.join("/XF11ID/analysis/", run_pargs["CYCLE"], username, "Results/") + data_dir0 = os.path.join( + "/XF11ID/analysis/", run_pargs["CYCLE"], username, "Results/" + ) os.makedirs(data_dir0, exist_ok=True) print("Results from this analysis will be stashed in the directory %s" % data_dir0) data_dir = os.path.join(data_dir0, uid_average + "/") @@ -92,7 +93,9 @@ def XPCS_XSVS_SAXS_Multi( wat = get_averaged_data_from_multi_res(multi_res, keystr="wat") if run_t_ROI_Inten: times_roi = get_averaged_data_from_multi_res(multi_res, keystr="times_roi") - mean_int_sets = get_averaged_data_from_multi_res(multi_res, keystr="mean_int_sets") + mean_int_sets = get_averaged_data_from_multi_res( + multi_res, keystr="mean_int_sets" + ) if run_one_time: g2 = 
get_averaged_data_from_multi_res(multi_res, keystr="g2") @@ -124,10 +127,14 @@ def XPCS_XSVS_SAXS_Multi( "relaxation_rate": 0.01, }, ) - g2_fit_paras = save_g2_fit_para_tocsv(g2_fit_result, filename=uid + "_g2_fit_paras.csv", path=data_dir) + g2_fit_paras = save_g2_fit_para_tocsv( + g2_fit_result, filename=uid + "_g2_fit_paras.csv", path=data_dir + ) if run_two_time: - g12b = get_averaged_data_from_multi_res(multi_res, keystr="g12b", different_length=True) + g12b = get_averaged_data_from_multi_res( + multi_res, keystr="g12b", different_length=True + ) g2b = get_averaged_data_from_multi_res(multi_res, keystr="g2b") tausb = get_averaged_data_from_multi_res(multi_res, keystr="tausb") @@ -160,7 +167,9 @@ def XPCS_XSVS_SAXS_Multi( }, ) - g2b_fit_paras = save_g2_fit_para_tocsv(g2_fit_resultb, filename=uid + "_g2b_fit_paras.csv", path=data_dir) + g2b_fit_paras = save_g2_fit_para_tocsv( + g2_fit_resultb, filename=uid + "_g2b_fit_paras.csv", path=data_dir + ) if run_four_time: g4 = get_averaged_data_from_multi_res(multi_res, keystr="g4") @@ -179,7 +188,9 @@ def XPCS_XSVS_SAXS_Multi( contrast_factorL = get_averaged_data_from_multi_res( multi_res, keystr="contrast_factorL", different_length=False ) - times_xsvs = get_averaged_data_from_multi_res(multi_res, keystr="times_xsvs", different_length=False) + times_xsvs = get_averaged_data_from_multi_res( + multi_res, keystr="times_xsvs", different_length=False + ) cont_pds = save_arrays( contrast_factorL, label=times_xsvs, @@ -188,9 +199,15 @@ def XPCS_XSVS_SAXS_Multi( return_res=True, ) if False: - spec_kmean = get_averaged_data_from_multi_res(multi_res, keystr="spec_kmean") - spec_pds = get_averaged_data_from_multi_res(multi_res, keystr="spec_pds", different_length=False) - times_xsvs = get_averaged_data_from_multi_res(multi_res, keystr="times_xsvs", different_length=False) + spec_kmean = get_averaged_data_from_multi_res( + multi_res, keystr="spec_kmean" + ) + spec_pds = get_averaged_data_from_multi_res( + multi_res, 
keystr="spec_pds", different_length=False + ) + times_xsvs = get_averaged_data_from_multi_res( + multi_res, keystr="times_xsvs", different_length=False + ) spec_his, spec_std = get_his_std_from_pds(spec_pds, his_shapes=None) ML_val, KL_val, K_ = get_xsvs_fit( spec_his, @@ -329,7 +346,9 @@ def XPCS_XSVS_SAXS_Multi( beg=good_start, ) if run_t_ROI_Inten: - plot_each_ring_mean_intensityc(times_roi, mean_int_sets, uid=uid, save=True, path=data_dir) + plot_each_ring_mean_intensityc( + times_roi, mean_int_sets, uid=uid, save=True, path=data_dir + ) if run_one_time: plot_g2_general( @@ -504,7 +523,9 @@ def XPCS_XSVS_SAXS_Multi( for k, v in zip(["taus", "g2", "g2_fit_paras"], [taus, g2, g2_fit_paras]): Exdt[k] = v if run_two_time: - for k, v in zip(["tausb", "g2b", "g2b_fit_paras", "g12b"], [tausb, g2b, g2b_fit_paras, g12b]): + for k, v in zip( + ["tausb", "g2b", "g2b_fit_paras", "g12b"], [tausb, g2b, g2b_fit_paras, g12b] + ): Exdt[k] = v if run_four_time: for k, v in zip(["taus4", "g4"], [taus4, g4]): @@ -656,6 +677,8 @@ def XPCS_XSVS_SAXS_Multi( suf_ids[1][i * step : (i + 1) * step], suf_ids[2][i * step : (i + 1) * step], ) - XPCS_XSVS_SAXS_Multi(0, 0, run_pargs=run_pargs, suf_ids=suf_idsi, uid_average=uid_averages[i]) + XPCS_XSVS_SAXS_Multi( + 0, 0, run_pargs=run_pargs, suf_ids=suf_idsi, uid_average=uid_averages[i] + ) run_time(t0) diff --git a/pyCHX/v2/_futurepyCHX/chx_Fitters2D.py b/pyCHX/v2/_futurepyCHX/chx_Fitters2D.py index 852502e..8be7062 100644 --- a/pyCHX/v2/_futurepyCHX/chx_Fitters2D.py +++ b/pyCHX/v2/_futurepyCHX/chx_Fitters2D.py @@ -12,7 +12,9 @@ def gauss_func(x, xc, amp, sigma, baseline): def gauss2D_func(x, y, xc, amp, sigmax, yc, sigmay, baseline): return ( - amp * np.exp(-((x - xc) ** 2) / 2.0 / sigmax**2) * np.exp(-((y - yc) ** 2) / 2.0 / sigmay**2) + amp + * np.exp(-((x - xc) ** 2) / 2.0 / sigmax**2) + * np.exp(-((y - yc) ** 2) / 2.0 / sigmay**2) + baseline ) @@ -75,13 +77,15 @@ def __call__(self, x, y, vx, vy, **kwargs): # make the parameters from the 
kwargs for key in self.params.keys(): - if key in kwargs.keys() and key is not "XY": + if key in kwargs.keys() and key != "XY": params[key].value = kwargs[key] else: # then guess params[key].value = guesskeys[key] - self.mod = Model(self.fitfunc, independent_vars=["x", "y"], param_names=self.params.keys()) + self.mod = Model( + self.fitfunc, independent_vars=["x", "y"], param_names=self.params.keys() + ) # assumes first var is dependent var, and save last params V = np.array([vx, vy]) self._res = self.mod.fit(V, x=x, y=y, params=params) @@ -142,7 +146,7 @@ def guess(self, **kwargs): if kwargs is not None: for key in kwargs.keys(): - if key in paramsdict and key is not "xy": + if key in paramsdict and key != "xy": paramsdict[key] = kwargs[key] return paramsdict @@ -189,15 +193,19 @@ def __call__(self, XY, img, **kwargs): # make the parameters from the kwargs for key in self.params.keys(): - if key in kwargs.keys() and key is not "XY": + if key in kwargs.keys() and key != "XY": params[key].value = kwargs[key] else: # then guess params[key].value = guesskeys[key] - self.mod = Model(self.fitfunc, independent_vars=["XY"], param_names=self.params.keys()) + self.mod = Model( + self.fitfunc, independent_vars=["XY"], param_names=self.params.keys() + ) # assumes first var is dependent var - res = self.mod.fit(img.ravel(), XY=(XY[0].ravel(), XY[1].ravel()), params=params, **kwargs) + res = self.mod.fit( + img.ravel(), XY=(XY[0].ravel(), XY[1].ravel()), params=params, **kwargs + ) ## old version, only return values # add reduced chisq to parameter list # res.best_values['chisq']=res.redchi @@ -269,7 +277,9 @@ def __call__(self, img, x=None, y=None, **kwargs): self.params["amp"].min = 0 return super(Gauss2DFitter, self).__call__(XY, img, **kwargs) - def fitfunc(self, XY, xc=None, yc=None, amp=1.0, baseline=0.0, sigmax=1.0, sigmay=1.0): + def fitfunc( + self, XY, xc=None, yc=None, amp=1.0, baseline=0.0, sigmax=1.0, sigmay=1.0 + ): """ xy : 2 by N by N matrix containing x and y 
xy[0] : x @@ -287,7 +297,9 @@ def fitfunc(self, XY, xc=None, yc=None, amp=1.0, baseline=0.0, sigmax=1.0, sigma yc = X.shape[0] // 2 return ( - amp * np.exp(-((X - xc) ** 2) / 2.0 / sigmax**2) * np.exp(-((Y - yc) ** 2) / 2.0 / sigmay**2) + amp + * np.exp(-((X - xc) ** 2) / 2.0 / sigmax**2) + * np.exp(-((Y - yc) ** 2) / 2.0 / sigmay**2) + baseline ) @@ -334,7 +346,7 @@ def guess(self, img, XY=None, **kwargs): paramsdict["sigmay"] = 1 # print( paramsdict ) for key in kwargs.keys(): - if key in paramsdict and key is not "xy": + if key in paramsdict and key != "xy": paramsdict[key] = kwargs[key] # print( paramsdict ) return paramsdict diff --git a/pyCHX/v2/_futurepyCHX/chx_compress.py b/pyCHX/v2/_futurepyCHX/chx_compress.py index 8ac7184..d57c2d2 100644 --- a/pyCHX/v2/_futurepyCHX/chx_compress.py +++ b/pyCHX/v2/_futurepyCHX/chx_compress.py @@ -1,11 +1,8 @@ -import gc import os import pickle as pkl import shutil import struct import sys -from contextlib import closing -from glob import iglob from multiprocessing import Pool import dill @@ -14,7 +11,7 @@ # imports handler from CHX # this is where the decision is made whether or not to use dask # from chxtools.handlers import EigerImages, EigerHandler -from eiger_io.fs_handler import EigerHandler, EigerImages +from eiger_io.fs_handler import EigerImages from tqdm import tqdm from pyCHX.chx_generic_functions import ( @@ -28,7 +25,7 @@ reverse_updown, rot90_clockwise, ) -from pyCHX.chx_libs import RUN_GUI, LogNorm, datetime, db, getpass, np, os, roi, time +from pyCHX.chx_libs import RUN_GUI, LogNorm, db, np, os, roi, time def run_dill_encoded(what): @@ -37,7 +34,9 @@ def run_dill_encoded(what): def apply_async(pool, fun, args, callback=None): - return pool.apply_async(run_dill_encoded, (dill.dumps((fun, args)),), callback=callback) + return pool.apply_async( + run_dill_encoded, (dill.dumps((fun, args)),), callback=callback + ) def map_async(pool, fun, args): @@ -196,7 +195,9 @@ def compress_eigerdata( 
images_per_file=images_per_file, ) else: - print("Using already created compressed file with filename as :%s." % filename) + print( + "Using already created compressed file with filename as :%s." % filename + ) beg = 0 return read_compressed_eigerdata( mask, @@ -243,7 +244,9 @@ def read_compressed_eigerdata( CAL = True else: try: - mask, avg_img, imgsum, bad_frame_list_ = pkl.load(open(filename + ".pkl", "rb")) + mask, avg_img, imgsum, bad_frame_list_ = pkl.load( + open(filename + ".pkl", "rb") + ) except: CAL = True if CAL: @@ -308,7 +311,9 @@ def para_compress_eigerdata( if not copy_rawdata: images_ = EigerImages(data_path, images_per_file, md) else: - print("Due to a IO problem running on GPFS. The raw data will be copied to /tmp_data/Data.") + print( + "Due to a IO problem running on GPFS. The raw data will be copied to /tmp_data/Data." + ) print("Copying...") copy_data(data_path, new_path) # print(data_path, new_path) @@ -328,11 +333,17 @@ def para_compress_eigerdata( N = int(np.ceil(N / bins)) Nf = int(np.ceil(N / num_sub)) if Nf > cpu_core_number: - print("The process number is larger than %s (XF11ID server core number)" % cpu_core_number) + print( + "The process number is larger than %s (XF11ID server core number)" + % cpu_core_number + ) num_sub_old = num_sub num_sub = int(np.ceil(N / cpu_core_number)) Nf = int(np.ceil(N / num_sub)) - print("The sub compressed file number was changed from %s to %s" % (num_sub_old, num_sub)) + print( + "The sub compressed file number was changed from %s to %s" + % (num_sub_old, num_sub) + ) create_compress_header(md, filename + "-header", nobytes, bins, rot90=rot90) # print( 'done for header here') # print(data_path_, images_per_file) @@ -380,7 +391,7 @@ def para_compress_eigerdata( print("Bad frame list are: %s" % bad_frame_list) else: print("No bad frames are involved.") - print("Combining the seperated compressed files together...") + print("Combining the separated compressed files together...") 
combine_compressed(filename, Nf, del_old=True) del results del res_ @@ -393,7 +404,9 @@ def para_compress_eigerdata( def combine_compressed(filename, Nf, del_old=True): old_files = np.concatenate( - np.array([[filename + "-header"], [filename + "_temp-%i.tmp" % i for i in range(Nf)]]) + np.array( + [[filename + "-header"], [filename + "_temp-%i.tmp" % i for i in range(Nf)]] + ) ) combine_binary_files(filename, old_files, del_old) @@ -451,15 +464,22 @@ def para_segment_compress_eigerdata( num_sub *= bins if N % num_sub: Nf = N // num_sub + 1 - print("The average image intensity would be slightly not correct, about 1% error.") - print("Please give a num_sub to make reminder of Num_images/num_sub =0 to get a correct avg_image") + print( + "The average image intensity would be slightly not correct, about 1% error." + ) + print( + "Please give a num_sub to make reminder of Num_images/num_sub =0 to get a correct avg_image" + ) else: Nf = N // num_sub print("It will create %i temporary files for parallel compression." 
% Nf) if Nf > num_max_para_process: N_runs = np.int(np.ceil(Nf / float(num_max_para_process))) - print("The parallel run number: %s is larger than num_max_para_process: %s" % (Nf, num_max_para_process)) + print( + "The parallel run number: %s is larger than num_max_para_process: %s" + % (Nf, num_max_para_process) + ) else: N_runs = 1 result = {} @@ -535,7 +555,9 @@ def segment_compress_eigerdata( else: images = EigerImages(data_path, images_per_file, md)[N1:N2] if reverse: - images = reverse_updown(EigerImages(data_path, images_per_file, md))[N1:N2] + images = reverse_updown(EigerImages(data_path, images_per_file, md))[ + N1:N2 + ] if rot90: images = rot90_clockwise(images) @@ -577,7 +599,11 @@ def segment_compress_eigerdata( v = np.ravel(np.array(img, dtype=dtype))[p] dlen = len(p) imgsum[n] = v.sum() - if (dlen == 0) or (imgsum[n] > bad_pixel_threshold) or (imgsum[n] <= bad_pixel_low_threshold): + if ( + (dlen == 0) + or (imgsum[n] > bad_pixel_threshold) + or (imgsum[n] <= bad_pixel_low_threshold) + ): dlen = 0 fp.write(struct.pack("@I", dlen)) else: @@ -588,12 +614,16 @@ def segment_compress_eigerdata( if bins == 1: fp.write(struct.pack("@{}{}".format(dlen, "ih"[nobytes == 2]), *v)) else: - fp.write(struct.pack("@{}{}".format(dlen, "dd"[nobytes == 2]), *v)) # n +=1 + fp.write( + struct.pack("@{}{}".format(dlen, "dd"[nobytes == 2]), *v) + ) # n +=1 del p, v, img fp.flush() fp.close() avg_img /= good_count - bad_frame_list = (np.array(imgsum) > bad_pixel_threshold) | (np.array(imgsum) <= bad_pixel_low_threshold) + bad_frame_list = (np.array(imgsum) > bad_pixel_threshold) | ( + np.array(imgsum) <= bad_pixel_low_threshold + ) sys.stdout.write("#") sys.stdout.flush() # del images, mask, avg_img, imgsum, bad_frame_list @@ -844,7 +874,8 @@ def init_compress_eigerdata( avg_img /= good_count bad_frame_list = np.where( - (np.array(imgsum) > bad_pixel_threshold) | (np.array(imgsum) <= bad_pixel_low_threshold) + (np.array(imgsum) > bad_pixel_threshold) + | (np.array(imgsum) 
<= bad_pixel_low_threshold) )[0] # bad_frame_list1 = np.where( np.array(imgsum) > bad_pixel_threshold )[0] # bad_frame_list2 = np.where( np.array(imgsum) < bad_pixel_low_threshold )[0] @@ -1046,7 +1077,9 @@ def __init__(self, FD, bins=100): self.FD = FD if (FD.end - FD.beg) % bins: - print("Please give a better bins number and make the length of FD/bins= integer") + print( + "Please give a better bins number and make the length of FD/bins= integer" + ) else: self.bins = bins self.md = FD.md @@ -1055,7 +1088,12 @@ def __init__(self, FD, bins=100): Nimg = FD.end - FD.beg slice_num = Nimg // bins self.end = slice_num - self.time_edge = np.array(create_time_slice(N=Nimg, slice_num=slice_num, slice_width=bins)) + FD.beg + self.time_edge = ( + np.array( + create_time_slice(N=Nimg, slice_num=slice_num, slice_width=bins) + ) + + FD.beg + ) self.get_bin_frame() def get_bin_frame(self): @@ -1065,7 +1103,9 @@ def get_bin_frame(self): # print (n) t1, t2 = self.time_edge[n] # print( t1, t2) - self.frames[:, :, n] = get_avg_imgc(FD, beg=t1, end=t2, sampling=1, plot_=False, show_progress=False) + self.frames[:, :, n] = get_avg_imgc( + FD, beg=t1, end=t2, sampling=1, plot_=False, show_progress=False + ) def rdframe(self, n): return self.frames[:, :, n] @@ -1092,7 +1132,7 @@ def __init__(self, filename, mode="rb"): if mode == "wb": raise ValueError("Write mode 'wb' not supported yet") if mode != "rb" and mode != "wb": - raise ValueError("Error, mode must be 'rb' or 'wb'" "got : {}".format(mode)) + raise ValueError("Error, mode must be 'rb' or 'wb'got : {}".format(mode)) self._filename = filename self._mode = mode # open the file descriptor @@ -1179,7 +1219,9 @@ def _read_raw(self, n): Reads from current cursor in file. 
""" if n > self.Nframes: - raise KeyError("Error, only {} frames, asked for {}".format(self.Nframes, n)) + raise KeyError( + "Error, only {} frames, asked for {}".format(self.Nframes, n) + ) # dlen is 4 bytes cur = self.frame_indexes[n] dlen = np.frombuffer(self._fd[cur : cur + 4], dtype=" bad_pixel_threshold) | (np.array(imgsum) <= bad_pixel_low_threshold))[0] + np.where( + (np.array(imgsum) > bad_pixel_threshold) + | (np.array(imgsum) <= bad_pixel_low_threshold) + )[0] + FD.beg ) diff --git a/pyCHX/v2/_futurepyCHX/chx_compress_analysis.py b/pyCHX/v2/_futurepyCHX/chx_compress_analysis.py index c51f02a..d971940 100644 --- a/pyCHX/v2/_futurepyCHX/chx_compress_analysis.py +++ b/pyCHX/v2/_futurepyCHX/chx_compress_analysis.py @@ -1,13 +1,8 @@ from __future__ import absolute_import, division, print_function import logging -import os -import struct -from collections import namedtuple import matplotlib.pyplot as plt -from skbeam.core.roi import extract_label_indices -from skbeam.core.utils import multi_tau_lags from tqdm import tqdm from pyCHX.chx_generic_functions import save_arrays @@ -17,18 +12,10 @@ from pyCHX.chx_libs import ( RUN_GUI, Figure, - LogNorm, colors, - colors_, - datetime, - db, - getpass, markers, - markers_, np, - os, roi, - time, ) logger = logging.getLogger(__name__) @@ -36,21 +23,17 @@ from modest_image import imshow from pyCHX.chx_compress import ( - Multifile, - compress_eigerdata, get_avg_imgc, - get_each_frame_intensityc, - init_compress_eigerdata, mean_intensityc, - pass_FD, - read_compressed_eigerdata, ) from pyCHX.chx_generic_functions import find_bad_pixels_FD # from pyCHX.chx_compress import * -def get_time_edge_avg_img(FD, frame_edge, show_progress=True, apply_threshold=False, threshold=15): +def get_time_edge_avg_img( + FD, frame_edge, show_progress=True, apply_threshold=False, threshold=15 +): """YG Dev Nov 14, 2017@CHX Update@2019/6/12 with option of apply a threshold for each frame Get averaged img by giving FD and frame edges @@ -76,7 
+59,9 @@ def get_time_edge_avg_img(FD, frame_edge, show_progress=True, apply_threshold=Fa for i in tqdm(range(Nt)): t1, t2 = frame_edge[i] if not apply_threshold: - d[i] = get_avg_imgc(FD, beg=t1, end=t2, sampling=1, plot_=False, show_progress=show_progress) + d[i] = get_avg_imgc( + FD, beg=t1, end=t2, sampling=1, plot_=False, show_progress=show_progress + ) else: dti = np.zeros([t2 - t1, avg_imgi.shape[0], avg_imgi.shape[1]]) j = 0 @@ -173,7 +158,9 @@ def cal_waterfallc( norm = np.bincount(qind)[1:] n = 0 # for i in tqdm(range( FD.beg , FD.end )): - for i in tqdm(range(FD.beg, FD.end, sampling), desc="Get waterfall for q index=%s" % qindex): + for i in tqdm( + range(FD.beg, FD.end, sampling), desc="Get waterfall for q index=%s" % qindex + ): (p, v) = FD.rdrawframe(i) w = np.where(timg[p])[0] pxlist = timg[p[w]] - 1 @@ -185,7 +172,9 @@ def cal_waterfallc( watf_ = watf.copy() watf = np.zeros([watf_.shape[0], waterfall_roi_size[0]]) for i in range(waterfall_roi_size[1]): - watf += watf_[:, waterfall_roi_size[0] * i : waterfall_roi_size[0] * (i + 1)] + watf += watf_[ + :, waterfall_roi_size[0] * i : waterfall_roi_size[0] * (i + 1) + ] watf /= waterfall_roi_size[0] if save: @@ -246,7 +235,9 @@ def plot_waterfallc( vmin = wat.min() if aspect is None: aspect = wat.shape[0] / wat.shape[1] - im = imshow(ax, wat.T, cmap=cmap, vmax=vmax, extent=extent, interpolation=interpolation) + im = imshow( + ax, wat.T, cmap=cmap, vmax=vmax, extent=extent, interpolation=interpolation + ) # im = ax.imshow(wat.T, cmap='viridis', vmax=vmax,extent= extent,interpolation = interpolation ) fig.colorbar(im) ax.set_aspect(aspect) @@ -265,7 +256,9 @@ def plot_waterfallc( return fig, ax, im -def get_waterfallc(FD, labeled_array, qindex=1, aspect=1.0, vmax=None, save=False, *argv, **kwargs): +def get_waterfallc( + FD, labeled_array, qindex=1, aspect=1.0, vmax=None, save=False, *argv, **kwargs +): """plot waterfall for a giving compressed file FD: class object, the compressed file handler @@ -304,12 
+297,16 @@ def get_waterfallc(FD, labeled_array, qindex=1, aspect=1.0, vmax=None, save=Fals return wat -def cal_each_ring_mean_intensityc(FD, ring_mask, sampling=1, timeperframe=None, multi_cor=False, *argv, **kwargs): +def cal_each_ring_mean_intensityc( + FD, ring_mask, sampling=1, timeperframe=None, multi_cor=False, *argv, **kwargs +): """ get time dependent mean intensity of each ring """ - mean_int_sets, index_list = mean_intensityc(FD, ring_mask, sampling, index=None, multi_cor=multi_cor) + mean_int_sets, index_list = mean_intensityc( + FD, ring_mask, sampling, index=None, multi_cor=multi_cor + ) if timeperframe is None: times = np.arange(FD.end - FD.beg) + FD.beg # get the time for each frame else: @@ -318,7 +315,9 @@ def cal_each_ring_mean_intensityc(FD, ring_mask, sampling=1, timeperframe=None, return times, mean_int_sets -def plot_each_ring_mean_intensityc(times, mean_int_sets, xlabel="Frame", save=False, *argv, **kwargs): +def plot_each_ring_mean_intensityc( + times, mean_int_sets, xlabel="Frame", save=False, *argv, **kwargs +): """ Plot time dependent mean intensity of each ring """ diff --git a/pyCHX/v2/_futurepyCHX/chx_correlation.py b/pyCHX/v2/_futurepyCHX/chx_correlation.py index 2ef23d2..37d1dc2 100644 --- a/pyCHX/v2/_futurepyCHX/chx_correlation.py +++ b/pyCHX/v2/_futurepyCHX/chx_correlation.py @@ -39,6 +39,7 @@ """ This module is for functions specific to time correlation """ + from __future__ import absolute_import, division, print_function from collections import namedtuple @@ -272,7 +273,7 @@ def lazy_one_time(image_iterable, num_levels, num_bufs, labels, internal_state=N ------ namedtuple A `results` object is yielded after every image has been processed. 
- This `reults` object contains, in this order: + This `results` object contains, in this order: - `g2`: the normalized correlation shape is (len(lag_steps), num_rois) - `lag_steps`: the times at which the correlation was computed @@ -394,7 +395,7 @@ def multi_tau_auto_corr(num_levels, num_bufs, labels, images): author: Mark Sutton For parameter description, please reference the docstring for lazy_one_time. Note that there is an API difference between this function - and `lazy_one_time`. The `images` arugment is at the end of this function + and `lazy_one_time`. The `images` argument is at the end of this function signature here for backwards compatibility, but is the first argument in the `lazy_one_time()` function. The semantics of the variables remain unchanged. @@ -463,7 +464,9 @@ def two_time_corr(labels, images, num_frames, num_bufs, num_levels=1): return two_time_state_to_results(result) -def lazy_two_time(labels, images, num_frames, num_bufs, num_levels=1, two_time_internal_state=None): +def lazy_two_time( + labels, images, num_frames, num_bufs, num_levels=1, two_time_internal_state=None +): """Generator implementation of two-time correlation If you do not want multi-tau correlation, set num_levels to 1 and num_bufs to the number of images you wish to correlate @@ -495,7 +498,7 @@ def lazy_two_time(labels, images, num_frames, num_bufs, num_levels=1, two_time_i ------ namedtuple A ``results`` object is yielded after every image has been processed. - This `reults` object contains, in this order: + This `results` object contains, in this order: - ``g2``: the normalized correlation shape is (num_rois, len(lag_steps), len(lag_steps)) - ``lag_steps``: the times at which the correlation was computed @@ -523,7 +526,9 @@ def lazy_two_time(labels, images, num_frames, num_bufs, num_levels=1, two_time_i 010401(1-4), 2007. 
""" if two_time_internal_state is None: - two_time_internal_state = _init_state_two_time(num_levels, num_bufs, labels, num_frames) + two_time_internal_state = _init_state_two_time( + num_levels, num_bufs, labels, num_frames + ) # create a shorthand reference to the results and state named tuple s = two_time_internal_state @@ -579,7 +584,10 @@ def lazy_two_time(labels, images, num_frames, num_bufs, num_levels=1, two_time_i t1_idx = (s.count_level[level] - 1) * 2 - current_img_time = ((s.time_ind[level - 1])[t1_idx] + (s.time_ind[level - 1])[t1_idx + 1]) / 2.0 + current_img_time = ( + (s.time_ind[level - 1])[t1_idx] + + (s.time_ind[level - 1])[t1_idx + 1] + ) / 2.0 # time frame for each level s.time_ind[level].append(current_img_time) @@ -699,7 +707,9 @@ def _two_time_process( if not isinstance(current_img_time, int): nshift = 2 ** (level - 1) for i in range(-nshift + 1, nshift + 1): - g2[:, int(tind1 + i), int(tind2 + i)] = (tmp_binned / (pi_binned * fi_binned)) * num_pixels + g2[:, int(tind1 + i), int(tind2 + i)] = ( + tmp_binned / (pi_binned * fi_binned) + ) * num_pixels else: g2[:, tind1, tind2] = tmp_binned / (pi_binned * fi_binned) * num_pixels @@ -805,7 +815,9 @@ def _validate_and_transform_inputs(num_bufs, num_levels, labels): length of each levels """ if num_bufs % 2 != 0: - raise ValueError("There must be an even number of `num_bufs`. You " "provided %s" % num_bufs) + raise ValueError( + "There must be an even number of `num_bufs`. 
You provided %s" % num_bufs + ) label_array, pixel_list = extract_label_indices(labels) # map the indices onto a sequential list of integers starting at 1 @@ -1051,22 +1063,32 @@ def __call__(self, img1, img2=None, normalization=None): self.tmpimgs[i].ravel()[self.subpxlsts[i]] = img1.ravel()[self.pxlsts[i]] if not self_correlation: self.tmpimgs2[i] *= 0 - self.tmpimgs2[i].ravel()[self.subpxlsts[i]] = img2.ravel()[self.pxlsts[i]] + self.tmpimgs2[i].ravel()[self.subpxlsts[i]] = img2.ravel()[ + self.pxlsts[i] + ] # multiply by maskcorrs > 0 to ignore invalid regions if self_correlation: ccorr = _cross_corr(self.tmpimgs[i]) * (self.maskcorrs[i] > 0) else: - ccorr = _cross_corr(self.tmpimgs[i], self.tmpimgs2[i]) * (self.maskcorrs[i] > 0) + ccorr = _cross_corr(self.tmpimgs[i], self.tmpimgs2[i]) * ( + self.maskcorrs[i] > 0 + ) # now handle the normalizations if "symavg" in normalization: # do symmetric averaging - Icorr = _cross_corr(self.tmpimgs[i] * self.submasks[i], self.submasks[i]) + Icorr = _cross_corr( + self.tmpimgs[i] * self.submasks[i], self.submasks[i] + ) if self_correlation: - Icorr2 = _cross_corr(self.submasks[i], self.tmpimgs[i] * self.submasks[i]) + Icorr2 = _cross_corr( + self.submasks[i], self.tmpimgs[i] * self.submasks[i] + ) else: - Icorr2 = _cross_corr(self.submasks[i], self.tmpimgs2[i] * self.submasks[i]) + Icorr2 = _cross_corr( + self.submasks[i], self.tmpimgs2[i] * self.submasks[i] + ) # there is an extra condition that Icorr*Icorr2 != 0 w = np.where(np.abs(Icorr * Icorr2) > 0) ccorr[w] *= self.maskcorrs[i][w] / Icorr[w] / Icorr2[w] @@ -1074,7 +1096,10 @@ def __call__(self, img1, img2=None, normalization=None): if "regular" in normalization: # only run on overlapping regions for correlation w = self.pxlst_maskcorrs[i] - ccorr[w] /= self.maskcorrs[i][w] * np.average(self.tmpimgs[i].ravel()[self.subpxlsts[i]]) ** 2 + ccorr[w] /= ( + self.maskcorrs[i][w] + * np.average(self.tmpimgs[i].ravel()[self.subpxlsts[i]]) ** 2 + ) ccorrs.append(ccorr) diff 
--git a/pyCHX/v2/_futurepyCHX/chx_correlationc.py b/pyCHX/v2/_futurepyCHX/chx_correlationc.py index fb31982..864099f 100644 --- a/pyCHX/v2/_futurepyCHX/chx_correlationc.py +++ b/pyCHX/v2/_futurepyCHX/chx_correlationc.py @@ -4,7 +4,6 @@ This module is for computation of time correlation by using compressing algorithm """ - from __future__ import absolute_import, division, print_function import logging @@ -310,7 +309,9 @@ def _validate_and_transform_inputs(num_bufs, num_levels, labels): length of each levels """ if num_bufs % 2 != 0: - raise ValueError("There must be an even number of `num_bufs`. You " "provided %s" % num_bufs) + raise ValueError( + "There must be an even number of `num_bufs`. You provided %s" % num_bufs + ) label_array, pixel_list = extract_label_indices(labels) # map the indices onto a sequential list of integers starting at 1 @@ -398,7 +399,9 @@ def _init_state_one_time(num_levels, num_bufs, labels, cal_error=False): # matrix for normalizing G into g2 future_intensity = np.zeros_like(G) if cal_error: - G_all = np.zeros((int((num_levels + 1) * num_bufs / 2), len(pixel_list)), dtype=np.float64) + G_all = np.zeros( + (int((num_levels + 1) * num_bufs / 2), len(pixel_list)), dtype=np.float64 + ) # matrix for normalizing G into g2 past_intensity_all = np.zeros_like(G_all) @@ -502,7 +505,7 @@ def lazy_one_time( ------- A `results` object is yielded after every image has been processed. - This `reults` object contains, in this order: + This `results` object contains, in this order: - `g2`: the normalized correlation shape is (len(lag_steps), num_rois) - `lag_steps`: the times at which the correlation was computed @@ -923,7 +926,7 @@ def multi_tau_auto_corr( author: Mark Sutton For parameter description, please reference the docstring for lazy_one_time. Note that there is an API difference between this function - and `lazy_one_time`. The `images` arugment is at the end of this function + and `lazy_one_time`. 
The `images` argument is at the end of this function signature here for backwards compatibility, but is the first argument in the `lazy_one_time()` function. The semantics of the variables remain unchanged. @@ -946,7 +949,9 @@ def multi_tau_auto_corr( return result.g2, result.lag_steps -def multi_tau_two_time_auto_corr(num_lev, num_buf, ring_mask, FD, bad_frame_list=None, imgsum=None, norm=None): +def multi_tau_two_time_auto_corr( + num_lev, num_buf, ring_mask, FD, bad_frame_list=None, imgsum=None, norm=None +): """Wraps generator implementation of multi-tau two time correlation This function computes two-time correlation Original code : author: Yugang Zhang @@ -1011,7 +1016,7 @@ def lazy_two_time( ------ namedtuple A ``results`` object is yielded after every image has been processed. - This `reults` object contains, in this order: + This `results` object contains, in this order: - ``g2``: the normalized correlation shape is (num_rois, len(lag_steps), len(lag_steps)) - ``lag_steps``: the times at which the correlation was computed @@ -1040,7 +1045,9 @@ def lazy_two_time( num_frames = FD.end - FD.beg if two_time_internal_state is None: - two_time_internal_state = _init_state_two_time(num_levels, num_bufs, labels, num_frames) + two_time_internal_state = _init_state_two_time( + num_levels, num_bufs, labels, num_frames + ) # create a shorthand reference to the results and state named tuple s = two_time_internal_state qind, pixelist = roi.extract_label_indices(labels) @@ -1111,7 +1118,10 @@ def lazy_two_time( t1_idx = (s.count_level[level] - 1) * 2 - current_img_time = ((s.time_ind[level - 1])[t1_idx] + (s.time_ind[level - 1])[t1_idx + 1]) / 2.0 + current_img_time = ( + (s.time_ind[level - 1])[t1_idx] + + (s.time_ind[level - 1])[t1_idx + 1] + ) / 2.0 # time frame for each level s.time_ind[level].append(current_img_time) # make the track_level zero once that level is processed @@ -1231,9 +1241,13 @@ def _two_time_process( if not isinstance(current_img_time, int): nshift 
= 2 ** (level - 1) for i in range(-nshift + 1, nshift + 1): - g2[:, int(tind1 + i), int(tind2 + i)] = (tmp_binned / (pi_binned * fi_binned)) * num_pixels + g2[:, int(tind1 + i), int(tind2 + i)] = ( + tmp_binned / (pi_binned * fi_binned) + ) * num_pixels else: - g2[:, int(tind1), int(tind2)] = tmp_binned / (pi_binned * fi_binned) * num_pixels + g2[:, int(tind1), int(tind2)] = ( + tmp_binned / (pi_binned * fi_binned) * num_pixels + ) # print( num_pixels ) @@ -1343,11 +1357,16 @@ def cal_c12c( if num_lev is None: num_lev = int(np.log(noframes / (num_buf - 1)) / np.log(2) + 1) + 1 - print("In this g2 calculation, the buf and lev number are: %s--%s--" % (num_buf, num_lev)) + print( + "In this g2 calculation, the buf and lev number are: %s--%s--" + % (num_buf, num_lev) + ) if bad_frame_list is not None: if len(bad_frame_list) != 0: - print("Bad frame involved and will be precessed!") - noframes -= len(np.where(np.in1d(bad_frame_list, range(good_start, FD.end)))[0]) + print("Bad frame involved and will be processed!") + noframes -= len( + np.where(np.in1d(bad_frame_list, range(good_start, FD.end)))[0] + ) print("%s frames will be processed..." % (noframes)) c12, lag_steps, state = multi_tau_two_time_auto_corr( @@ -1384,11 +1403,16 @@ def cal_g2c( if num_lev is None: num_lev = int(np.log(noframes / (num_buf - 1)) / np.log(2) + 1) + 1 - print("In this g2 calculation, the buf and lev number are: %s--%s--" % (num_buf, num_lev)) + print( + "In this g2 calculation, the buf and lev number are: %s--%s--" + % (num_buf, num_lev) + ) if bad_frame_list is not None: if len(bad_frame_list) != 0: - print("Bad frame involved and will be precessed!") - noframes -= len(np.where(np.in1d(bad_frame_list, range(good_start, FD.end)))[0]) + print("Bad frame involved and will be processed!") + noframes -= len( + np.where(np.in1d(bad_frame_list, range(good_start, FD.end)))[0] + ) print("%s frames will be processed..." 
% (noframes)) if cal_error: @@ -1437,8 +1461,10 @@ def cal_g2c( g2[:g_max, qi - 1] = avgGi[:g_max] / (avgPi[:g_max] * avgFi[:g_max]) g2_err[:g_max, qi - 1] = np.sqrt( (1 / (avgFi[:g_max] * avgPi[:g_max])) ** 2 * devGi[:g_max] ** 2 - + (avgGi[:g_max] / (avgFi[:g_max] ** 2 * avgPi[:g_max])) ** 2 * devFi[:g_max] ** 2 - + (avgGi[:g_max] / (avgFi[:g_max] * avgPi[:g_max] ** 2)) ** 2 * devPi[:g_max] ** 2 + + (avgGi[:g_max] / (avgFi[:g_max] ** 2 * avgPi[:g_max])) ** 2 + * devFi[:g_max] ** 2 + + (avgGi[:g_max] / (avgFi[:g_max] * avgPi[:g_max] ** 2)) ** 2 + * devPi[:g_max] ** 2 ) print("G2 with error bar calculation DONE!") @@ -1498,7 +1524,7 @@ def __init__( pixelist: 1-D array, interest pixel list norm: each q-ROI of each frame is normalized by the corresponding q-ROI of time averaged intensity imgsum: each q-ROI of each frame is normalized by the total intensity of the corresponding frame, should have the same time sequences as FD, e.g., imgsum[10] corresponding to FD[10] - norm_inten: if True, each q-ROI of each frame is normlized by total intensity of the correponding q-ROI of the corresponding frame + norm_inten: if True, each q-ROI of each frame is normalized by total intensity of the corresponding q-ROI of the corresponding frame qind: the index of each ROI in one frame, i.e., q if norm_inten is True: qind has to be given @@ -1552,14 +1578,20 @@ def get_data(self): pxlist = timg[p[w]] - 1 # np.bincount( qind[pxlist], weight= - if self.mean_int_sets is not None: # for each frame will normalize each ROI by it's averaged value + if ( + self.mean_int_sets is not None + ): # for each frame will normalize each ROI by it's averaged value for j in range(noqs): # if i ==100: # if j==0: # print( self.mean_int_sets[i][j] ) # print( qind_[ noprs[j]: noprs[j+1] ] ) - Mean_Int_Qind[qind_[noprs[j] : noprs[j + 1]]] = self.mean_int_sets[i][j] - norm_Mean_Int_Qind = Mean_Int_Qind[pxlist] # self.mean_int_set or Mean_Int_Qind[pxlist] + Mean_Int_Qind[qind_[noprs[j] : noprs[j + 1]]] = 
self.mean_int_sets[ + i + ][j] + norm_Mean_Int_Qind = Mean_Int_Qind[ + pxlist + ] # self.mean_int_set or Mean_Int_Qind[pxlist] # if i==100: # print( i, Mean_Int_Qind[ self.qind== 11 ]) @@ -1613,7 +1645,7 @@ def __init__( pixelist: 1-D array, interest pixel list norm: each q-ROI of each frame is normalized by the corresponding q-ROI of time averaged intensity imgsum: each q-ROI of each frame is normalized by the total intensity of the corresponding frame, should have the same time sequences as FD, e.g., imgsum[10] corresponding to FD[10] - mean_int_sets: each q-ROI of each frame is normlized by total intensity of the correponding q-ROI of the corresponding frame + mean_int_sets: each q-ROI of each frame is normalized by total intensity of the corresponding q-ROI of the corresponding frame qind: the index of each ROI in one frame, i.e., q if mean_int_sets is not None: qind has to be not None @@ -1666,14 +1698,20 @@ def get_data(self): w = np.where(timg[p])[0] pxlist = timg[p[w]] - 1 - if self.mean_int_sets is not None: # for normalization of each averaged ROI of each frame + if ( + self.mean_int_sets is not None + ): # for normalization of each averaged ROI of each frame for j in range(noqs): # if i ==100: # if j==0: # print( self.mean_int_sets[i][j] ) # print( qind_[ noprs[j]: noprs[j+1] ] ) - Mean_Int_Qind[qind_[noprs[j] : noprs[j + 1]]] = self.mean_int_sets[i][j] - norm_Mean_Int_Qind = Mean_Int_Qind[pxlist] # self.mean_int_set or Mean_Int_Qind[pxlist] + Mean_Int_Qind[qind_[noprs[j] : noprs[j + 1]]] = self.mean_int_sets[ + i + ][j] + norm_Mean_Int_Qind = Mean_Int_Qind[ + pxlist + ] # self.mean_int_set or Mean_Int_Qind[pxlist] # if i==100: # print( i, Mean_Int_Qind[ self.qind== 11 ]) @@ -1749,7 +1787,7 @@ def auto_two_Arrayc(data_pixel, rois, index=None): DO = True except: print( - "The array is too large. The Sever can't handle such big array. Will calulate different Q sequencely" + "The array is too large. The Sever can't handle such big array. 
Will calculate different Q sequencely" ) """TO be done here """ DO = False @@ -1765,7 +1803,9 @@ def auto_two_Arrayc(data_pixel, rois, index=None): sum2 = sum1.T # print( qi, qlist, ) # print( g12b[:,:,qi -1 ] ) - g12b[:, :, i] = np.dot(data_pixel_qi, data_pixel_qi.T) / sum1 / sum2 / nopr[qi - 1] + g12b[:, :, i] = ( + np.dot(data_pixel_qi, data_pixel_qi.T) / sum1 / sum2 / nopr[qi - 1] + ) i += 1 return g12b @@ -1773,12 +1813,12 @@ def auto_two_Arrayc(data_pixel, rois, index=None): def auto_two_Arrayc_ExplicitNorm(data_pixel, rois, norm=None, index=None): """ Dec 16, 2015, Y.G.@CHX - a numpy operation method to get two-time correlation function by giving explict normalization + a numpy operation method to get two-time correlation function by giving explicit normalization Parameters: data: images sequence, shape as [img[0], img[1], imgs_length] rois: 2-D array, the interested roi, has the same shape as image, can be rings for saxs, boxes for gisaxs - norm: if not None, shoud be the shape as data_pixel, will normalize two time by this norm + norm: if not None, should be the shape as data_pixel, will normalize two time by this norm if None, will return two time without normalization Options: @@ -1816,7 +1856,7 @@ def auto_two_Arrayc_ExplicitNorm(data_pixel, rois, norm=None, index=None): DO = True except: print( - "The array is too large. The Sever can't handle such big array. Will calulate different Q sequencely" + "The array is too large. The Sever can't handle such big array. 
Will calculate different Q sequencely" ) """TO be done here """ DO = False @@ -1832,7 +1872,9 @@ def auto_two_Arrayc_ExplicitNorm(data_pixel, rois, norm=None, index=None): else: sum1 = 1 sum2 = 1 - g12b[:, :, i] = np.dot(data_pixel_qi, data_pixel_qi.T) / sum1 / sum2 / nopr[qi - 1] + g12b[:, :, i] = ( + np.dot(data_pixel_qi, data_pixel_qi.T) / sum1 / sum2 / nopr[qi - 1] + ) i += 1 return g12b @@ -1881,7 +1923,7 @@ def two_time_norm(data_pixel, rois, index=None): DO = True except: print( - "The array is too large. The Sever can't handle such big array. Will calulate different Q sequencely" + "The array is too large. The Sever can't handle such big array. Will calculate different Q sequencely" ) """TO be done here """ DO = False @@ -1909,7 +1951,7 @@ def check_normalization(frame_num, q_list, imgsa, data_pixel): frame_num: integer, the number of frame to be checked q_list: list of integer, the list of q to be checked imgsa: the raw data - data_pixel: the normalized data, caculated by fucntion Get_Pixel_Arrayc + data_pixel: the normalized data, calculated by function Get_Pixel_Arrayc Plot the intensities """ fig, ax = plt.subplots(2) diff --git a/pyCHX/v2/_futurepyCHX/chx_correlationp.py b/pyCHX/v2/_futurepyCHX/chx_correlationp.py index 646e750..9ff9356 100644 --- a/pyCHX/v2/_futurepyCHX/chx_correlationp.py +++ b/pyCHX/v2/_futurepyCHX/chx_correlationp.py @@ -3,24 +3,21 @@ yuzhang@bnl.gov This module is for parallel computation of time correlation """ + from __future__ import absolute_import, division, print_function import logging -import sys -from collections import namedtuple from multiprocessing import Pool -import dill import numpy as np import skbeam.core.roi as roi from skbeam.core.roi import extract_label_indices -from skbeam.core.utils import multi_tau_lags -from pyCHX.chx_compress import apply_async, go_through_FD, map_async, pass_FD, run_dill_encoded +from pyCHX.chx_compress import apply_async, pass_FD from pyCHX.chx_correlationc import _one_time_process as 
_one_time_processp from pyCHX.chx_correlationc import _one_time_process_error as _one_time_process_errorp from pyCHX.chx_correlationc import _two_time_process as _two_time_processp -from pyCHX.chx_correlationc import _validate_and_transform_inputs, get_pixelist_interp_iq +from pyCHX.chx_correlationc import _validate_and_transform_inputs from pyCHX.chx_libs import tqdm logger = logging.getLogger(__name__) @@ -129,7 +126,7 @@ def lazy_two_timep( ------ namedtuple A ``results`` object is yielded after every image has been processed. - This `reults` object contains, in this order: + This `results` object contains, in this order: - ``g2``: the normalized correlation shape is (num_rois, len(lag_steps), len(lag_steps)) - ``lag_steps``: the times at which the correlation was computed @@ -235,7 +232,10 @@ def lazy_two_timep( s.buf[level - 1, prev - 1] + s.buf[level - 1, s.cur[level - 1] - 1] ) / 2 t1_idx = (s.count_level[level] - 1) * 2 - current_img_time = ((s.time_ind[level - 1])[t1_idx] + (s.time_ind[level - 1])[t1_idx + 1]) / 2.0 + current_img_time = ( + (s.time_ind[level - 1])[t1_idx] + + (s.time_ind[level - 1])[t1_idx + 1] + ) / 2.0 # time frame for each level s.time_ind[level].append(current_img_time) # make the track_level zero once that level is processed @@ -287,13 +287,20 @@ def cal_c12p( pass_FD(FD, i) if num_lev is None: num_lev = int(np.log(noframes / (num_buf - 1)) / np.log(2) + 1) + 1 - print("In this g2 calculation, the buf and lev number are: %s--%s--" % (num_buf, num_lev)) + print( + "In this g2 calculation, the buf and lev number are: %s--%s--" + % (num_buf, num_lev) + ) if bad_frame_list is not None: if len(bad_frame_list) != 0: - print("Bad frame involved and will be precessed!") - noframes -= len(np.where(np.in1d(bad_frame_list, range(good_start, FD.end)))[0]) + print("Bad frame involved and will be processed!") + noframes -= len( + np.where(np.in1d(bad_frame_list, range(good_start, FD.end)))[0] + ) print("%s frames will be processed..." 
% (noframes)) - ring_masks = [np.array(ring_mask == i, dtype=np.int64) for i in np.unique(ring_mask)[1:]] + ring_masks = [ + np.array(ring_mask == i, dtype=np.int64) for i in np.unique(ring_mask)[1:] + ] qind, pixelist = roi.extract_label_indices(ring_mask) if norm is not None: S = norm.shape @@ -303,7 +310,9 @@ def cal_c12p( :, np.in1d( pixelist, - extract_label_indices(np.array(ring_mask == i, dtype=np.int64))[1], + extract_label_indices(np.array(ring_mask == i, dtype=np.int64))[ + 1 + ], ), ] for i in np.unique(ring_mask)[1:] @@ -313,7 +322,9 @@ def cal_c12p( norm[ np.in1d( pixelist, - extract_label_indices(np.array(ring_mask == i, dtype=np.int64))[1], + extract_label_indices(np.array(ring_mask == i, dtype=np.int64))[ + 1 + ], ) ] for i in np.unique(ring_mask)[1:] @@ -380,7 +391,7 @@ def __init__(self, num_levels, num_bufs, labels, cal_error=False): """YG. DEV Nov, 2016, Initialize class for the generator-based multi-tau for one time correlation - Jan 1, 2018, Add cal_error option to calculate signal to noise to one time correaltion + Jan 1, 2018, Add cal_error option to calculate signal to noise to one time correlation """ ( @@ -665,13 +676,20 @@ def cal_g2p( pass_FD(FD, i) if num_lev is None: num_lev = int(np.log(noframes / (num_buf - 1)) / np.log(2) + 1) + 1 - print("In this g2 calculation, the buf and lev number are: %s--%s--" % (num_buf, num_lev)) + print( + "In this g2 calculation, the buf and lev number are: %s--%s--" + % (num_buf, num_lev) + ) if bad_frame_list is not None: if len(bad_frame_list) != 0: print("%s Bad frames involved and will be discarded!" % len(bad_frame_list)) - noframes -= len(np.where(np.in1d(bad_frame_list, range(good_start, FD.end)))[0]) + noframes -= len( + np.where(np.in1d(bad_frame_list, range(good_start, FD.end)))[0] + ) print("%s frames will be processed..." 
% (noframes - 1)) - ring_masks = [np.array(ring_mask == i, dtype=np.int64) for i in np.unique(ring_mask)[1:]] + ring_masks = [ + np.array(ring_mask == i, dtype=np.int64) for i in np.unique(ring_mask)[1:] + ] qind, pixelist = roi.extract_label_indices(ring_mask) noqs = len(np.unique(qind)) nopr = np.bincount(qind, minlength=(noqs + 1))[1:] @@ -683,7 +701,9 @@ def cal_g2p( :, np.in1d( pixelist, - extract_label_indices(np.array(ring_mask == i, dtype=np.int64))[1], + extract_label_indices(np.array(ring_mask == i, dtype=np.int64))[ + 1 + ], ), ] for i in np.unique(ring_mask)[1:] @@ -693,7 +713,9 @@ def cal_g2p( norm[ np.in1d( pixelist, - extract_label_indices(np.array(ring_mask == i, dtype=np.int64))[1], + extract_label_indices(np.array(ring_mask == i, dtype=np.int64))[ + 1 + ], ) ] for i in np.unique(ring_mask)[1:] @@ -789,8 +811,10 @@ def cal_g2p( g2[:g_max, i] = avgGi[:g_max] / (avgPi[:g_max] * avgFi[:g_max]) g2_err[:g_max, i] = np.sqrt( (1 / (avgFi[:g_max] * avgPi[:g_max])) ** 2 * devGi[:g_max] ** 2 - + (avgGi[:g_max] / (avgFi[:g_max] ** 2 * avgPi[:g_max])) ** 2 * devFi[:g_max] ** 2 - + (avgGi[:g_max] / (avgFi[:g_max] * avgPi[:g_max] ** 2)) ** 2 * devPi[:g_max] ** 2 + + (avgGi[:g_max] / (avgFi[:g_max] ** 2 * avgPi[:g_max])) ** 2 + * devFi[:g_max] ** 2 + + (avgGi[:g_max] / (avgFi[:g_max] * avgPi[:g_max] ** 2)) ** 2 + * devPi[:g_max] ** 2 ) Gmax = max(g_max, Gmax) lag_stepsi = res[i][1] @@ -828,17 +852,24 @@ def cal_GPF( pass_FD(FD, i) if num_lev is None: num_lev = int(np.log(noframes / (num_buf - 1)) / np.log(2) + 1) + 1 - print("In this g2 calculation, the buf and lev number are: %s--%s--" % (num_buf, num_lev)) + print( + "In this g2 calculation, the buf and lev number are: %s--%s--" + % (num_buf, num_lev) + ) if bad_frame_list is not None: if len(bad_frame_list) != 0: print("%s Bad frames involved and will be discarded!" 
% len(bad_frame_list)) - noframes -= len(np.where(np.in1d(bad_frame_list, range(good_start, FD.end)))[0]) + noframes -= len( + np.where(np.in1d(bad_frame_list, range(good_start, FD.end)))[0] + ) print("%s frames will be processed..." % (noframes - 1)) if np.min(ring_mask) == 0: qstart = 1 else: qstart = 0 - ring_masks = [np.array(ring_mask == i, dtype=np.int64) for i in np.unique(ring_mask)[qstart:]] + ring_masks = [ + np.array(ring_mask == i, dtype=np.int64) for i in np.unique(ring_mask)[qstart:] + ] qind, pixelist = roi.extract_label_indices(ring_mask) noqs = len(np.unique(qind)) nopr = np.bincount(qind, minlength=(noqs + 1))[qstart:] @@ -962,8 +993,10 @@ def get_g2_from_ROI_GPF(G, P, F, roi_mask): g2[:g_max, i - 1] = avgGi[:g_max] / (avgPi[:g_max] * avgFi[:g_max]) g2_err[:g_max, i - 1] = np.sqrt( (1 / (avgFi[:g_max] * avgPi[:g_max])) ** 2 * devGi[:g_max] ** 2 - + (avgGi[:g_max] / (avgFi[:g_max] ** 2 * avgPi[:g_max])) ** 2 * devFi[:g_max] ** 2 - + (avgGi[:g_max] / (avgFi[:g_max] * avgPi[:g_max] ** 2)) ** 2 * devPi[:g_max] ** 2 + + (avgGi[:g_max] / (avgFi[:g_max] ** 2 * avgPi[:g_max])) ** 2 + * devFi[:g_max] ** 2 + + (avgGi[:g_max] / (avgFi[:g_max] * avgPi[:g_max] ** 2)) ** 2 + * devPi[:g_max] ** 2 ) return g2, g2_err @@ -1025,7 +1058,9 @@ def auto_two_Arrayp(data_pixel, rois, index=None): pool = Pool(processes=len(inputs)) results = {} for i in inputs: - results[i] = pool.apply_async(_get_two_time_for_one_q, [qlist[i], data_pixel_qis[i], nopr, noframes]) + results[i] = pool.apply_async( + _get_two_time_for_one_q, [qlist[i], data_pixel_qis[i], nopr, noframes] + ) pool.close() pool.join() res = np.array([results[k].get() for k in list(sorted(results.keys()))]) diff --git a/pyCHX/v2/_futurepyCHX/chx_correlationp2.py b/pyCHX/v2/_futurepyCHX/chx_correlationp2.py index 9abe33f..b9c5e0e 100644 --- a/pyCHX/v2/_futurepyCHX/chx_correlationp2.py +++ b/pyCHX/v2/_futurepyCHX/chx_correlationp2.py @@ -5,24 +5,21 @@ Feb 20, 2018 The chx_correlationp2 is for dedug g2 """ + from 
__future__ import absolute_import, division, print_function import logging -import sys -from collections import namedtuple from multiprocessing import Pool -import dill import numpy as np import skbeam.core.roi as roi from skbeam.core.roi import extract_label_indices -from skbeam.core.utils import multi_tau_lags -from pyCHX.chx_compress import apply_async, go_through_FD, map_async, pass_FD, run_dill_encoded +from pyCHX.chx_compress import apply_async, pass_FD from pyCHX.chx_correlationc import _one_time_process as _one_time_processp from pyCHX.chx_correlationc import _one_time_process_error as _one_time_process_errorp from pyCHX.chx_correlationc import _two_time_process as _two_time_processp -from pyCHX.chx_correlationc import _validate_and_transform_inputs, get_pixelist_interp_iq +from pyCHX.chx_correlationc import _validate_and_transform_inputs from pyCHX.chx_libs import tqdm logger = logging.getLogger(__name__) @@ -131,7 +128,7 @@ def lazy_two_timep( ------ namedtuple A ``results`` object is yielded after every image has been processed. 
- This `reults` object contains, in this order: + This `results` object contains, in this order: - ``g2``: the normalized correlation shape is (num_rois, len(lag_steps), len(lag_steps)) - ``lag_steps``: the times at which the correlation was computed @@ -232,7 +229,10 @@ def lazy_two_timep( s.buf[level - 1, prev - 1] + s.buf[level - 1, s.cur[level - 1] - 1] ) / 2 t1_idx = (s.count_level[level] - 1) * 2 - current_img_time = ((s.time_ind[level - 1])[t1_idx] + (s.time_ind[level - 1])[t1_idx + 1]) / 2.0 + current_img_time = ( + (s.time_ind[level - 1])[t1_idx] + + (s.time_ind[level - 1])[t1_idx + 1] + ) / 2.0 # time frame for each level s.time_ind[level].append(current_img_time) # make the track_level zero once that level is processed @@ -284,13 +284,20 @@ def cal_c12p( pass_FD(FD, i) if num_lev is None: num_lev = int(np.log(noframes / (num_buf - 1)) / np.log(2) + 1) + 1 - print("In this g2 calculation, the buf and lev number are: %s--%s--" % (num_buf, num_lev)) + print( + "In this g2 calculation, the buf and lev number are: %s--%s--" + % (num_buf, num_lev) + ) if bad_frame_list is not None: if len(bad_frame_list) != 0: - print("Bad frame involved and will be precessed!") - noframes -= len(np.where(np.in1d(bad_frame_list, range(good_start, FD.end)))[0]) + print("Bad frame involved and will be processed!") + noframes -= len( + np.where(np.in1d(bad_frame_list, range(good_start, FD.end)))[0] + ) print("%s frames will be processed..." % (noframes)) - ring_masks = [np.array(ring_mask == i, dtype=np.int64) for i in np.unique(ring_mask)[1:]] + ring_masks = [ + np.array(ring_mask == i, dtype=np.int64) for i in np.unique(ring_mask)[1:] + ] qind, pixelist = roi.extract_label_indices(ring_mask) if norm is not None: norms = [ @@ -364,7 +371,7 @@ def __init__(self, num_levels, num_bufs, labels, cal_error=False): """YG. 
DEV Nov, 2016, Initialize class for the generator-based multi-tau for one time correlation - Jan 1, 2018, Add cal_error option to calculate signal to noise to one time correaltion + Jan 1, 2018, Add cal_error option to calculate signal to noise to one time correlation """ ( @@ -641,13 +648,20 @@ def cal_g2p( pass_FD(FD, i) if num_lev is None: num_lev = int(np.log(noframes / (num_buf - 1)) / np.log(2) + 1) + 1 - print("In this g2 calculation, the buf and lev number are: %s--%s--" % (num_buf, num_lev)) + print( + "In this g2 calculation, the buf and lev number are: %s--%s--" + % (num_buf, num_lev) + ) if bad_frame_list is not None: if len(bad_frame_list) != 0: print("%s Bad frames involved and will be discarded!" % len(bad_frame_list)) - noframes -= len(np.where(np.in1d(bad_frame_list, range(good_start, FD.end)))[0]) + noframes -= len( + np.where(np.in1d(bad_frame_list, range(good_start, FD.end)))[0] + ) print("%s frames will be processed..." % (noframes - 1)) - ring_masks = [np.array(ring_mask == i, dtype=np.int64) for i in np.unique(ring_mask)[1:]] + ring_masks = [ + np.array(ring_mask == i, dtype=np.int64) for i in np.unique(ring_mask)[1:] + ] qind, pixelist = roi.extract_label_indices(ring_mask) noqs = len(np.unique(qind)) nopr = np.bincount(qind, minlength=(noqs + 1))[1:] @@ -757,8 +771,10 @@ def cal_g2p( g2[:g_max, i] = avgGi[:g_max] / (avgPi[:g_max] * avgFi[:g_max]) g2_err[:g_max, i] = np.sqrt( (1 / (avgFi[:g_max] * avgPi[:g_max])) ** 2 * devGi[:g_max] ** 2 - + (avgGi[:g_max] / (avgFi[:g_max] ** 2 * avgPi[:g_max])) ** 2 * devFi[:g_max] ** 2 - + (avgGi[:g_max] / (avgFi[:g_max] * avgPi[:g_max] ** 2)) ** 2 * devPi[:g_max] ** 2 + + (avgGi[:g_max] / (avgFi[:g_max] ** 2 * avgPi[:g_max])) ** 2 + * devFi[:g_max] ** 2 + + (avgGi[:g_max] / (avgFi[:g_max] * avgPi[:g_max] ** 2)) ** 2 + * devPi[:g_max] ** 2 ) Gmax = max(g_max, Gmax) lag_stepsi = res[i][1] @@ -842,7 +858,9 @@ def auto_two_Arrayp(data_pixel, rois, index=None): pool = Pool(processes=len(inputs)) results = {} 
for i in inputs: - results[i] = pool.apply_async(_get_two_time_for_one_q, [qlist[i], data_pixel_qis[i], nopr, noframes]) + results[i] = pool.apply_async( + _get_two_time_for_one_q, [qlist[i], data_pixel_qis[i], nopr, noframes] + ) pool.close() pool.join() res = np.array([results[k].get() for k in list(sorted(results.keys()))]) diff --git a/pyCHX/v2/_futurepyCHX/chx_crosscor.py b/pyCHX/v2/_futurepyCHX/chx_crosscor.py index 28e839b..fbcc5d5 100644 --- a/pyCHX/v2/_futurepyCHX/chx_crosscor.py +++ b/pyCHX/v2/_futurepyCHX/chx_crosscor.py @@ -7,16 +7,14 @@ """ This module is for functions specific to spatial correlation in order to tackle the motion of speckles """ + from __future__ import absolute_import, division, print_function -from collections import namedtuple import numpy as np from scipy.signal import fftconvolve -from skbeam.core.roi import extract_label_indices # from __future__ import absolute_import, division, print_function -from skbeam.core.utils import multi_tau_lags # for a convenient status bar try: @@ -86,7 +84,9 @@ def direct_corss_cor(im1, im2): d1 = im1[j:, i:] d2 = im2[:-j, :-i] # print(i,j) - C[i + Nx, j + Ny] = np.sum(d1 * d2) / (np.average(d1) * np.average(d2) * d1.size) + C[i + Nx, j + Ny] = np.sum(d1 * d2) / ( + np.average(d1) * np.average(d2) * d1.size + ) return C.T @@ -308,7 +308,9 @@ def __call__(self, img1, img2=None, normalization=None, check_res=False): if self_correlation: ccorr[w] /= maskcor[w] * np.average(tmpimg[w]) ** 2 else: - ccorr[w] /= maskcor[w] * np.average(tmpimg[w]) * np.average(tmpimg2[w]) + ccorr[w] /= ( + maskcor[w] * np.average(tmpimg[w]) * np.average(tmpimg2[w]) + ) if check_res: if reg == 0: self.ckn = ccorr.copy() @@ -342,58 +344,15 @@ def _centered(img, sz): import threading -import warnings # from . 
import sigtools -import numpy as np from numpy import ( - allclose, - angle, - arange, - argsort, array, asarray, - atleast_1d, - atleast_2d, - cast, - dot, - exp, - expand_dims, - iscomplexobj, - isscalar, - mean, - ndarray, - newaxis, - ones, - pi, - poly, - polyadd, - polyder, - polydiv, - polymul, - polysub, - polyval, - prod, - product, - r_, - ravel, - real_if_close, - reshape, - roots, - sort, - sum, - take, - transpose, - unique, - where, - zeros, - zeros_like, ) from numpy.fft import irfftn, rfftn -from scipy import linalg from scipy._lib._version import NumpyVersion -from scipy._lib.six import callable -from scipy.fftpack import fft, fft2, fftfreq, fftn, ifft, ifft2, ifftn, ifftshift +from scipy.fftpack import fftn, ifftn # from ._arraytools import axis_slice, axis_reverse, odd_ext, even_ext, const_ext @@ -492,7 +451,9 @@ def fftconvolve_new(in1, in2, mode="full"): s1 = array(in1.shape) s2 = array(in2.shape) - complex_result = np.issubdtype(in1.dtype, np.complex) or np.issubdtype(in2.dtype, np.complex) + complex_result = np.issubdtype(in1.dtype, np.complex) or np.issubdtype( + in2.dtype, np.complex + ) shape = s1 + s2 - 1 if mode == "valid": @@ -526,7 +487,7 @@ def fftconvolve_new(in1, in2, mode="full"): elif mode == "valid": return _centered(ret, s1 - s2 + 1) else: - raise ValueError("Acceptable mode flags are 'valid'," " 'same', or 'full'.") + raise ValueError("Acceptable mode flags are 'valid', 'same', or 'full'.") def _cross_corr1(img1, img2=None): @@ -758,9 +719,13 @@ def __call__(self, img1, img2=None, normalization=None, desc="cc"): # do symmetric averaging Icorr = _cross_corr1(tmpimg * self.submasks[reg], self.submasks[reg]) if self_correlation: - Icorr2 = _cross_corr1(self.submasks[reg], tmpimg * self.submasks[reg]) + Icorr2 = _cross_corr1( + self.submasks[reg], tmpimg * self.submasks[reg] + ) else: - Icorr2 = _cross_corr1(self.submasks[reg], tmpimg2 * self.submasks[reg]) + Icorr2 = _cross_corr1( + self.submasks[reg], tmpimg2 * self.submasks[reg] 
+ ) # there is an extra condition that Icorr*Icorr2 != 0 w = np.where(np.abs(Icorr * Icorr2) > 0) # DO WE NEED THIS (use i,j). ccorr[w] *= self.maskcorrs[reg][w] / Icorr[w] / Icorr2[w] @@ -772,7 +737,11 @@ def __call__(self, img1, img2=None, normalization=None, desc="cc"): if self_correlation: ccorr[w] /= self.maskcorrs[reg][w] * np.average(tmpimg[w]) ** 2 else: - ccorr[w] /= self.maskcorrs[reg][w] * np.average(tmpimg[w]) * np.average(tmpimg2[w]) + ccorr[w] /= ( + self.maskcorrs[reg][w] + * np.average(tmpimg[w]) + * np.average(tmpimg2[w]) + ) ccorrs.append(ccorr) if len(ccorrs) == 1: @@ -784,9 +753,8 @@ def __call__(self, img1, img2=None, normalization=None, desc="cc"): ##for parallel from multiprocessing import Pool -import dill -from pyCHX.chx_compress import apply_async, map_async +from pyCHX.chx_compress import apply_async def run_para_ccorr_sym(ccorr_sym, FD, nstart=0, nend=None, imgsum=None, img_norm=None): diff --git a/pyCHX/v2/_futurepyCHX/chx_generic_functions.py b/pyCHX/v2/_futurepyCHX/chx_generic_functions.py index 0e3c577..fac2cb0 100644 --- a/pyCHX/v2/_futurepyCHX/chx_generic_functions.py +++ b/pyCHX/v2/_futurepyCHX/chx_generic_functions.py @@ -1,18 +1,16 @@ import copy import datetime from os import listdir -from shutil import copyfile import matplotlib.cm as mcm import numpy as np import PIL import pytz import scipy -from matplotlib import cm from modest_image import imshow from scipy.special import erf -from skbeam.core.utils import angle_grid, radial_grid, radius_to_twotheta, twotheta_to_q -from skimage.draw import disk, ellipse, line, line_aa, polygon +from skbeam.core.utils import angle_grid, radial_grid, radius_to_twotheta +from skimage.draw import disk, ellipse, polygon from skimage.filters import prewitt # from tqdm import * @@ -87,7 +85,9 @@ def generate_h5_list(inDir, filename): for fp_ in fp: if ".h5" in fp_: append_txtfile(filename=filename, data=np.array([FP_ + "/" + fp_])) - print("The full path of all the .h5 in %s has been saved in 
%s." % (inDir, filename)) + print( + "The full path of all the .h5 in %s has been saved in %s." % (inDir, filename) + ) print("You can use ./analysis/run_gui to visualize all the h5 file.") @@ -103,7 +103,7 @@ def fit_one_peak_curve(x, y, fit_range=None): fwhm: float, full width at half max intensity of the peak, 2*sigma fwhm_std:float, error bar of the full width at half max intensity of the peak xf: the x in the fit - out: the fitting class resutled from lmfit + out: the fitting class resulted from lmfit """ from lmfit.models import LinearModel, LorentzianModel @@ -248,7 +248,9 @@ def get_zero_nozero_qind_from_roi_mask(roi_mask, mask): return w, w1 -def get_masked_qval_qwid_dict_using_Rmax(new_mask, setup_pargs, old_roi_mask, old_cen, geometry): +def get_masked_qval_qwid_dict_using_Rmax( + new_mask, setup_pargs, old_roi_mask, old_cen, geometry +): """YG Dev April 22, 2019 Get qval_dict, qwid_dict by applying mask to roi_mask using a Rmax method""" cy, cx = setup_pargs["center"] my, mx = new_mask.shape @@ -277,7 +279,9 @@ def get_masked_qval_qwid_dict_using_Rmax(new_mask, setup_pargs, old_roi_mask, ol "Ldet": setup_pargs["Ldet"], "lambda_": setup_pargs["lambda_"], } - qval_dict1, qwid_dict1 = get_masked_qval_qwid_dict(roi_mask1, Fmask, setup_pargs_, geometry) + qval_dict1, qwid_dict1 = get_masked_qval_qwid_dict( + roi_mask1, Fmask, setup_pargs_, geometry + ) # w = get_zero_qind_from_roi_mask(roi_mask1,Fmask) return qval_dict1, qwid_dict1 # ,w @@ -285,7 +289,9 @@ def get_masked_qval_qwid_dict_using_Rmax(new_mask, setup_pargs, old_roi_mask, ol def get_masked_qval_qwid_dict(roi_mask, mask, setup_pargs, geometry): """YG Dev April 22, 2019 Get qval_dict, qwid_dict by applying mask to roi_mask""" - qval_dict_, qwid_dict_ = get_qval_qwid_dict(roi_mask, setup_pargs, geometry=geometry) + qval_dict_, qwid_dict_ = get_qval_qwid_dict( + roi_mask, setup_pargs, geometry=geometry + ) w, w1 = get_zero_nozero_qind_from_roi_mask(roi_mask, mask) qval_dictx = {k: v for (k, v) in 
list(qval_dict_.items()) if k not in w} qwid_dictx = {k: v for (k, v) in list(qwid_dict_.items()) if k not in w} @@ -303,7 +309,7 @@ def get_qval_qwid_dict(roi_mask, setup_pargs, geometry="saxs"): Input: roi_mask: integer type 2D array setup_pargs: dict, should at least contains, center (direct beam center), dpix (in mm), - lamda_: in A-1, Ldet: in mm + lambda_: in A-1, Ldet: in mm e.g., {'Ldet': 1495.0, abs #essential 'center': [-4469, 363], #essential @@ -315,7 +321,7 @@ def get_qval_qwid_dict(roi_mask, setup_pargs, geometry="saxs"): 'uid': 'uid=b85dad'} geometry: support saxs for isotropic transmission SAXS ang_saxs for anisotropic transmission SAXS - flow_saxs for anisotropic transmission SAXS under flow (center symetric) + flow_saxs for anisotropic transmission SAXS under flow (center symmetric) Return: qval_dict: dict, key as q-number, val: q val @@ -608,12 +614,16 @@ def plot_q_g2fitpara_general( if geometry == "ang_saxs": title_short = "Angle= %.2f" % (short_ulabel[s_ind]) + r"$^\circ$" elif geometry == "gi_saxs": - title_short = r"$Q_z= $" + "%.4f" % (short_ulabel[s_ind]) + r"$\AA^{-1}$" + title_short = ( + r"$Q_z= $" + "%.4f" % (short_ulabel[s_ind]) + r"$\AA^{-1}$" + ) else: title_short = "" else: # qr if geometry == "ang_saxs" or geometry == "gi_saxs": - title_short = r"$Q_r= $" + "%.5f " % (short_ulabel[s_ind]) + r"$\AA^{-1}$" + title_short = ( + r"$Q_r= $" + "%.5f " % (short_ulabel[s_ind]) + r"$\AA^{-1}$" + ) else: title_short = "" # print(geometry) @@ -643,8 +653,12 @@ def plot_q_g2fitpara_general( ax2 = fig.add_subplot(4, 1, 2) ax3 = fig.add_subplot(4, 1, 3) ax4 = fig.add_subplot(4, 1, 4) - plot1D(x=qi, y=betai, m="o", ls="--", c="k", ax=ax1, legend=r"$\beta$", title="") - plot1D(x=qi, y=alphai, m="o", ls="--", c="r", ax=ax2, legend=r"$\alpha$", title="") + plot1D( + x=qi, y=betai, m="o", ls="--", c="k", ax=ax1, legend=r"$\beta$", title="" + ) + plot1D( + x=qi, y=alphai, m="o", ls="--", c="r", ax=ax2, legend=r"$\alpha$", title="" + ) plot1D( x=qi, 
y=baselinei, @@ -795,12 +809,12 @@ def plot_xy_x2( **kwargs, ): """YG.@CHX 2019/10/ Plot x, y, x2, if have, will plot as twiny( same y, different x) - This funciton is primary for plot q-Iq + This function is primary for plot q-Iq Input: x: one-d array, x in one unit y: one-d array, - x2:one-d array, x in anoter unit + x2:one-d array, x in another unit pargs: dict, could include 'uid', 'path' loglog: if True, if plot x and y in log, by default plot in y-log save: if True, save the plot in the path defined in pargs @@ -853,7 +867,9 @@ def plot_xy_x2( fig.savefig(fp, dpi=fig.dpi) -def save_oavs_tifs(uid, data_dir, brightness_scale=1, scalebar_size=100, scale=1, threshold=0): +def save_oavs_tifs( + uid, data_dir, brightness_scale=1, scalebar_size=100, scale=1, threshold=0 +): """save oavs as png""" tifs = list(db[uid].data("OAV_image"))[0] try: @@ -866,8 +882,12 @@ def save_oavs_tifs(uid, data_dir, brightness_scale=1, scalebar_size=100, scale=1 h = db[uid] oavs = tifs - oav_period = h["descriptors"][0]["configuration"]["OAV"]["data"]["OAV_cam_acquire_period"] - oav_expt = h["descriptors"][0]["configuration"]["OAV"]["data"]["OAV_cam_acquire_time"] + oav_period = h["descriptors"][0]["configuration"]["OAV"]["data"][ + "OAV_cam_acquire_period" + ] + oav_expt = h["descriptors"][0]["configuration"]["OAV"]["data"][ + "OAV_cam_acquire_time" + ] oav_times = [] for i in range(len(oavs)): oav_times.append(oav_expt + i * oav_period) @@ -891,7 +911,7 @@ def save_oavs_tifs(uid, data_dir, brightness_scale=1, scalebar_size=100, scale=1 plt.imshow(rgb_cont_img, interpolation="none", resample=True, cmap="gray") plt.axis("equal") - cross = [685, 440, 50] # definintion of direct beam: x, y, size + cross = [685, 440, 50] # definition of direct beam: x, y, size plt.plot( [cross[0] - cross[2] / 2, cross[0] + cross[2] / 2], [cross[1], cross[1]], @@ -903,7 +923,9 @@ def save_oavs_tifs(uid, data_dir, brightness_scale=1, scalebar_size=100, scale=1 "r-", ) if pixel_scalebar is not None: - 
plt.plot([1100, 1100 + pixel_scalebar], [150, 150], "r-", Linewidth=5) # scale bar. + plt.plot( + [1100, 1100 + pixel_scalebar], [150, 150], "r-", Linewidth=5 + ) # scale bar. plt.text(1000, 50, text_string, fontsize=14, color="r") plt.text(600, 50, str(oav_times[m])[:5] + " [s]", fontsize=14, color="r") plt.axis("off") @@ -954,7 +976,8 @@ def evalue_array(array, verbose=True): ) if verbose: print( - "The min, max, avg, std of this array are: %s %s %s %s, respectively." % (_min, _max, avg, std) + "The min, max, avg, std of this array are: %s %s %s %s, respectively." + % (_min, _max, avg, std) ) return _min, _max, avg, std @@ -971,7 +994,10 @@ def find_good_xpcs_uids(fuids, Nlim=100, det=["4m", "1m", "500"]): """ guids = [] for i, uid in enumerate(fuids): - if db[uid]["start"]["plan_name"] == "count" or db[uid]["start"]["plan_name"] == "manual_count": + if ( + db[uid]["start"]["plan_name"] == "count" + or db[uid]["start"]["plan_name"] == "manual_count" + ): head = db[uid]["start"] for dec in head["detectors"]: for dt in det: @@ -1005,7 +1031,9 @@ def create_fullImg_with_box( roi_mask = np.zeros(shape, dtype=np.int32) for i in range(box_nx): for j in range(box_ny): - roi_mask[i * Wrow : (i + 1) * Wrow, j * Wcol : (j + 1) * Wcol] = i * box_ny + j + 1 + roi_mask[i * Wrow : (i + 1) * Wrow, j * Wcol : (j + 1) * Wcol] = ( + i * box_ny + j + 1 + ) # roi_mask *= mask return roi_mask @@ -1108,7 +1136,10 @@ def copy_data(old_path, new_path="/tmp_data/data/"): for fp in tqdm(fps): if not os.path.exists(new_path + os.path.basename(fp)): shutil.copy(fp, new_path) - print("The files %s are copied: %s." % (old_path[:-10] + "*", new_path + os.path.basename(fp))) + print( + "The files %s are copied: %s." 
+ % (old_path[:-10] + "*", new_path + os.path.basename(fp)) + ) def delete_data(old_path, new_path="/tmp_data/data/"): @@ -1118,7 +1149,6 @@ def delete_data(old_path, new_path="/tmp_data/data/"): new_path: the new path """ import glob - import shutil # old_path = sud[2][0] # new_path = '/tmp_data/data/' @@ -1142,8 +1172,8 @@ def show_tif_series( ): """ tif_series: list of 2D tiff images - Nx: the number in the row for dispalying - center: the center of iamge (or direct beam pixel) + Nx: the number in the row for displaying + center: the center of image (or direct beam pixel) w: the ROI half size in pixel vmin: the min intensity value for plot vmax: if None, will be max intensity value of the ROI @@ -1186,9 +1216,6 @@ def show_tif_series( return fig, ax -from scipy.special import erf - - def ps(y, shift=0.5, replot=True, logplot="off", x=None): """ Dev 16, 2018 @@ -1223,7 +1250,10 @@ def is_positive(num): for i in range(len(y)): current_positive = is_positive(ym[i]) if current_positive != positive: - list_of_roots.append(x[i - 1] + (x[i] - x[i - 1]) / (abs(ym[i]) + abs(ym[i - 1])) * abs(ym[i - 1])) + list_of_roots.append( + x[i - 1] + + (x[i] - x[i - 1]) / (abs(ym[i]) + abs(ym[i - 1])) * abs(ym[i - 1]) + ) positive = not positive if len(list_of_roots) >= 2: FWHM = abs(list_of_roots[-1] - list_of_roots[0]) @@ -1340,13 +1370,19 @@ def create_seg_ring(ring_edges, ang_edges, mask, setup_pargs): flow_geometry=False, ) - roi_mask, good_ind = combine_two_roi_mask(roi_mask_qr, roi_mask_ang, pixel_num_thres=100) - qval_dict_ = get_qval_dict(qr_center=qr, qz_center=ang_center, one_qz_multi_qr=False) + roi_mask, good_ind = combine_two_roi_mask( + roi_mask_qr, roi_mask_ang, pixel_num_thres=100 + ) + qval_dict_ = get_qval_dict( + qr_center=qr, qz_center=ang_center, one_qz_multi_qr=False + ) qval_dict = {i: qval_dict_[k] for (i, k) in enumerate(good_ind)} return roi_mask, qval_dict -def find_bad_pixels_FD(bad_frame_list, FD, img_shape=[514, 1030], threshold=15, 
show_progress=True): +def find_bad_pixels_FD( + bad_frame_list, FD, img_shape=[514, 1030], threshold=15, show_progress=True +): """Designed to find bad pixel list in 500K threshold: the max intensity in 5K """ @@ -1370,7 +1406,7 @@ def find_bad_pixels_FD(bad_frame_list, FD, img_shape=[514, 1030], threshold=15, def get_q_iq_using_dynamic_mask(FD, mask, setup_pargs, bin_number=1, threshold=15): """DEV by Yugang@CHX, June 6, 2019 Get circular average of a time series using a dynamics mask, which pixel values are defined as - zeors if above a threshold. + zeros if above a threshold. Return an averaged q(pix)-Iq-q(A-1) of the whole time series using bin frames with bin_number Input: FD: the multifile handler for the time series @@ -1380,7 +1416,7 @@ def get_q_iq_using_dynamic_mask(FD, mask, setup_pargs, bin_number=1, threshold=1 'dpix', 'Ldet','lambda_', 'center' bin_number: bin number of the frame threshold: define the dynamics mask, which pixel values are defined as - zeors if above this threshold + zeros if above this threshold Output: qp_saxs: q in pixel iq_saxs: intenstity @@ -1392,14 +1428,19 @@ def get_q_iq_using_dynamic_mask(FD, mask, setup_pargs, bin_number=1, threshold=1 Nimg_ = FD.end - FD.beg # Nimg_ = 100 Nimg = Nimg_ // bin_number - time_edge = np.array(create_time_slice(N=Nimg_, slice_num=Nimg, slice_width=bin_number)) + beg + time_edge = ( + np.array(create_time_slice(N=Nimg_, slice_num=Nimg, slice_width=bin_number)) + + beg + ) for n in tqdm(range(Nimg)): t1, t2 = time_edge[n] # print(t1,t2) if bin_number == 1: avg_imgi = FD.rdframe(t1) else: - avg_imgi = get_avg_imgc(FD, beg=t1, end=t2, sampling=1, plot_=False, show_progress=False) + avg_imgi = get_avg_imgc( + FD, beg=t1, end=t2, sampling=1, plot_=False, show_progress=False + ) badpi = find_bad_pixels_FD( np.arange(t1, t2), FD, @@ -1408,7 +1449,9 @@ def get_q_iq_using_dynamic_mask(FD, mask, setup_pargs, bin_number=1, threshold=1 show_progress=False, ) img = avg_imgi * mask * badpi - qp_saxsi, iq_saxsi, 
q_saxsi = get_circular_average(img, mask * badpi, save=False, pargs=setup_pargs) + qp_saxsi, iq_saxsi, q_saxsi = get_circular_average( + img, mask * badpi, save=False, pargs=setup_pargs + ) # print( img.max()) if t1 == FD.beg: qp_saxs, iq_saxs, q_saxs = ( @@ -1464,10 +1507,10 @@ def get_img_from_iq(qp, iq, img_shape, center): def average_array_withNan(array, axis=0, mask=None): """YG. Jan 23, 2018 - Average array invovling np.nan along axis + Average array involving np.nan along axis Input: - array: ND array, actually should be oneD or twoD at this stage..TODOLIST for ND + array: AND array, actually should be oneD or twoD at this stage..TODOLIST for AND axis: the average axis mask: bool, same shape as array, if None, will mask all the nan values Output: @@ -1490,10 +1533,10 @@ def average_array_withNan(array, axis=0, mask=None): def deviation_array_withNan(array, axis=0, mask=None): """YG. Jan 23, 2018 - Get the deviation of array invovling np.nan along axis + Get the deviation of array involving np.nan along axis Input: - array: ND array + array: AND array axis: the average axis mask: bool, same shape as array, if None, will mask all the nan values Output: @@ -1576,10 +1619,14 @@ def get_echos(dat_arr, min_distance=10): """ from skimage.feature import peak_local_max - max_ind = peak_local_max(dat_arr, min_distance) # !!! careful, skimage function reverses the order (wtf?) + max_ind = peak_local_max( + dat_arr, min_distance + ) # !!! careful, skimage function reverses the order (wtf?) 
min_ind = [] for i in range(len(max_ind[:-1])): - min_ind.append(max_ind[i + 1][0] + np.argmin(dat_arr[max_ind[i + 1][0] : max_ind[i][0]])) + min_ind.append( + max_ind[i + 1][0] + np.argmin(dat_arr[max_ind[i + 1][0] : max_ind[i][0]]) + ) # unfortunately, skimage function fu$$s up the format: max_ind is an array of a list of lists...fix this: mmax_ind = [] for l in max_ind: @@ -1592,7 +1639,7 @@ def pad_length(arr, pad_val=np.nan): """ arr: 2D matrix pad_val: values being padded - adds pad_val to each row, to make the length of each row equal to the lenght of the longest row of the original matrix + adds pad_val to each row, to make the length of each row equal to the length of the longest row of the original matrix -> used to convert python generic data object to HDF5 native format function fixes python bug in padding (np.pad) integer array with np.nan by LW 12/30/2017 @@ -1634,8 +1681,8 @@ def ls_dir(inDir, have_list=[], exclude_list=[]): """Y.G. Aug 1, 2019 List all filenames in a filefolder inDir: fullpath of the inDir - have_string: only retrun filename containing the string - exclude_string: only retrun filename not containing the string + have_string: only return filename containing the string + exclude_string: only return filename not containing the string """ from os import listdir @@ -1661,7 +1708,7 @@ def ls_dir2(inDir, string=None): """Y.G. 
Nov 1, 2017 List all filenames in a filefolder (not include hidden files and subfolders) inDir: fullpath of the inDir - string: if not None, only retrun filename containing the string + string: if not None, only return filename containing the string """ from os import listdir from os.path import isfile, join @@ -1669,7 +1716,9 @@ def ls_dir2(inDir, string=None): if string is None: tifs = np.array([f for f in listdir(inDir) if isfile(join(inDir, f))]) else: - tifs = np.array([f for f in listdir(inDir) if (isfile(join(inDir, f))) & (string in f)]) + tifs = np.array( + [f for f in listdir(inDir) if (isfile(join(inDir, f))) & (string in f)] + ) return tifs @@ -1749,16 +1798,20 @@ def get_roi_nr( qinterest = qslist[q] # qindices = [i for i,x in enumerate(qs) if x == qinterest] qindices = [i for i, x in enumerate(qs) if np.abs(x - qinterest) < q_thresh] - # print('q_indicies: ',qindices) + # print('q_indices: ',qindices) else: qinterest = q - qindices = [i for i, x in enumerate(qs) if np.abs(x - qinterest) < q_thresh] # new + qindices = [ + i for i, x in enumerate(qs) if np.abs(x - qinterest) < q_thresh + ] # new if phi_nr: phiinterest = phislist[phi] phiindices = [i for i, x in enumerate(phis) if x == phiinterest] else: phiinterest = phi - phiindices = [i for i, x in enumerate(phis) if np.abs(x - phiinterest) < p_thresh] # new + phiindices = [ + i for i, x in enumerate(phis) if np.abs(x - phiinterest) < p_thresh + ] # new # print('phi: %s phi_index: %s'%(phiinterest,phiindices)) # qindices = [i for i,x in enumerate(qs) if x == qinterest] # phiindices = [i for i,x in enumerate(phis) if x == phiinterest] @@ -1774,7 +1827,14 @@ def get_roi_nr( print(qslist) print("list of available phis:") print(phislist) - print("Roi number for Q= " + str(ret_list[1]) + " and phi= " + str(ret_list[2]) + ": " + str(ret_list[0])) + print( + "Roi number for Q= " + + str(ret_list[1]) + + " and phi= " + + str(ret_list[2]) + + ": " + + str(ret_list[0]) + ) return ret_list @@ -1785,7 +1845,7 @@ 
def get_fit_by_two_linear( mid_xpoint2=None, xrange=None, ): - """YG Octo 16,2017 Fit a curve with two linear func, the curve is splitted by mid_xpoint, + """YG Octo 16,2017 Fit a curve with two linear func, the curve is split by mid_xpoint, namely, fit the curve in two regions defined by (xmin,mid_xpoint ) and (mid_xpoint2, xmax) Input: x: 1D np.array @@ -1795,9 +1855,9 @@ def get_fit_by_two_linear( Return: D1, gmfit1, D2, gmfit2 : fit parameter (slope, background) of linear fit1 - convinent fit class, gmfit1(x) gives yvale + convenient fit class, gmfit1(x) gives yvale fit parameter (slope, background) of linear fit2 - convinent fit class, gmfit2(x) gives yvale + convenient fit class, gmfit2(x) gives yvale """ if xrange is None: @@ -1829,7 +1889,9 @@ def get_curve_turning_points( """YG Octo 16,2017 Get a turning point of a curve by doing a two-linear fit """ - D1, gmfit1, D2, gmfit2 = get_fit_by_two_linear(x, y, mid_xpoint1, mid_xpoint2, xrange) + D1, gmfit1, D2, gmfit2 = get_fit_by_two_linear( + x, y, mid_xpoint1, mid_xpoint2, xrange + ) return get_cross_point(x, gmfit1, gmfit2) @@ -1837,7 +1899,9 @@ def plot_fit_two_linear_fit(x, y, gmfit1, gmfit2, ax=None): """YG Octo 16,2017 Plot data with two fitted linear func""" if ax is None: fig, ax = plt.subplots() - plot1D(x=x, y=y, ax=ax, c="k", legend="data", m="o", ls="") # logx=True, logy=True ) + plot1D( + x=x, y=y, ax=ax, c="k", legend="data", m="o", ls="" + ) # logx=True, logy=True ) plot1D(x=x, y=gmfit1(x), ax=ax, c="r", m="", ls="-", legend="fit1") plot1D(x=x, y=gmfit2(x), ax=ax, c="b", m="", ls="-", legend="fit2") return ax @@ -1849,7 +1913,10 @@ def linear_fit(x, y, xrange=None): """ if xrange is not None: xmin, xmax = xrange - x1, x2 = find_index(x, xmin, tolerance=None), find_index(x, xmax, tolerance=None) + x1, x2 = ( + find_index(x, xmin, tolerance=None), + find_index(x, xmax, tolerance=None), + ) x_ = x[x1:x2] y_ = y[x1:x2] else: @@ -1961,22 +2028,32 @@ def sgolay2d(z, window_size, order, 
derivative=None): Z = np.zeros((new_shape)) # top band band = z[0, :] - Z[:half_size, half_size:-half_size] = band - np.abs(np.flipud(z[1 : half_size + 1, :]) - band) + Z[:half_size, half_size:-half_size] = band - np.abs( + np.flipud(z[1 : half_size + 1, :]) - band + ) # bottom band band = z[-1, :] - Z[-half_size:, half_size:-half_size] = band + np.abs(np.flipud(z[-half_size - 1 : -1, :]) - band) + Z[-half_size:, half_size:-half_size] = band + np.abs( + np.flipud(z[-half_size - 1 : -1, :]) - band + ) # left band band = np.tile(z[:, 0].reshape(-1, 1), [1, half_size]) - Z[half_size:-half_size, :half_size] = band - np.abs(np.fliplr(z[:, 1 : half_size + 1]) - band) + Z[half_size:-half_size, :half_size] = band - np.abs( + np.fliplr(z[:, 1 : half_size + 1]) - band + ) # right band band = np.tile(z[:, -1].reshape(-1, 1), [1, half_size]) - Z[half_size:-half_size, -half_size:] = band + np.abs(np.fliplr(z[:, -half_size - 1 : -1]) - band) + Z[half_size:-half_size, -half_size:] = band + np.abs( + np.fliplr(z[:, -half_size - 1 : -1]) - band + ) # central band Z[half_size:-half_size, half_size:-half_size] = z # top left corner band = z[0, 0] - Z[:half_size, :half_size] = band - np.abs(np.flipud(np.fliplr(z[1 : half_size + 1, 1 : half_size + 1])) - band) + Z[:half_size, :half_size] = band - np.abs( + np.flipud(np.fliplr(z[1 : half_size + 1, 1 : half_size + 1])) - band + ) # bottom right corner band = z[-1, -1] Z[-half_size:, -half_size:] = band + np.abs( @@ -1985,10 +2062,14 @@ def sgolay2d(z, window_size, order, derivative=None): # top right corner band = Z[half_size, -half_size:] - Z[:half_size, -half_size:] = band - np.abs(np.flipud(Z[half_size + 1 : 2 * half_size + 1, -half_size:]) - band) + Z[:half_size, -half_size:] = band - np.abs( + np.flipud(Z[half_size + 1 : 2 * half_size + 1, -half_size:]) - band + ) # bottom left corner band = Z[-half_size:, half_size].reshape(-1, 1) - Z[-half_size:, :half_size] = band - np.abs(np.fliplr(Z[-half_size:, half_size + 1 : 2 * half_size + 
1]) - band) + Z[-half_size:, :half_size] = band - np.abs( + np.fliplr(Z[-half_size:, half_size + 1 : 2 * half_size + 1]) - band + ) # solve system and convolve if derivative == None: @@ -2003,7 +2084,9 @@ def sgolay2d(z, window_size, order, derivative=None): elif derivative == "both": c = np.linalg.pinv(A)[1].reshape((window_size, -1)) r = np.linalg.pinv(A)[2].reshape((window_size, -1)) - return scipy.signal.fftconvolve(Z, -r, mode="valid"), scipy.signal.fftconvolve(Z, -c, mode="valid") + return scipy.signal.fftconvolve(Z, -r, mode="valid"), scipy.signal.fftconvolve( + Z, -c, mode="valid" + ) def load_filelines(fullpath): @@ -2038,7 +2121,7 @@ def extract_data_from_file( good_line_pattern: str, data will be extract below this good_line_pattern Or giving start_row: int good_cols: list of integer, good index of cols - lables: the label of the good_cols + labels: the label of the good_cols #save: False, if True will save the data into a csv file with filename appending csv ?? Return: a pds.dataframe @@ -2122,7 +2205,7 @@ def get_print_uids(start_time, stop_time, return_all_info=False): def get_last_uids(n=-1): """YG Sep 26, 2017 - A Convinient function to copy uid to jupyter for analysis""" + A Convenient function to copy uid to jupyter for analysis""" uid = db[n]["start"]["uid"][:8] sid = db[n]["start"]["scan_id"] m = db[n]["start"]["Measurement"] @@ -2137,14 +2220,16 @@ def get_base_all_filenames(inDir, base_filename_cut_length=-7): base_filename_cut_length: to which length the base name is unique Output: dict: keys, base filename - vales, all realted filename + vales, all related filename """ from os import listdir from os.path import isfile, join tifs = np.array([f for f in listdir(inDir) if isfile(join(inDir, f))]) tifsc = list(tifs.copy()) - utifs = np.sort(np.unique(np.array([f[:base_filename_cut_length] for f in tifs])))[::-1] + utifs = np.sort(np.unique(np.array([f[:base_filename_cut_length] for f in tifs])))[ + ::-1 + ] files = {} for uf in utifs: files[uf] 
= [] @@ -2285,7 +2370,9 @@ def get_mass_center_one_roi(FD, roi_mask, roi_ind): m = roi_mask == roi_ind cx, cy = np.zeros(int((FD.end - FD.beg) / 1)), np.zeros(int((FD.end - FD.beg) / 1)) n = 0 - for i in tqdm(range(FD.beg, FD.end, 1), desc="Get mass center of one ROI of each frame"): + for i in tqdm( + range(FD.beg, FD.end, 1), desc="Get mass center of one ROI of each frame" + ): img = FD.rdframe(i) * m c = scipy.ndimage.measurements.center_of_mass(img) cx[n], cy[n] = int(c[0]), int(c[1]) @@ -2419,7 +2506,9 @@ def create_chip_edges_mask(det="1M"): return mask -def create_ellipse_donut(cx, cy, wx_inner, wy_inner, wx_outer, wy_outer, roi_mask, gap=0): +def create_ellipse_donut( + cx, cy, wx_inner, wy_inner, wx_outer, wy_outer, roi_mask, gap=0 +): Nmax = np.max(np.unique(roi_mask)) rr1, cc1 = ellipse(cy, cx, wy_inner, wx_inner) rr2, cc2 = ellipse(cy, cx, wy_inner + gap, wx_inner + gap) @@ -2485,10 +2574,10 @@ def get_fra_num_by_dose(exp_dose, exp_time, att=1, dead_time=2): """ Calculate the frame number to be correlated by giving a X-ray exposure dose - Paramters: + Parameters: exp_dose: a list, the exposed dose, e.g., in unit of exp_time(ms)*N(fram num)*att( attenuation) exp_time: float, the exposure time for a xpcs time sereies - dead_time: dead time for the fast shutter reponse time, CHX = 2ms + dead_time: dead time for the fast shutter response time, CHX = 2ms Return: noframes: the frame number to be correlated, exp_dose/( exp_time + dead_time ) e.g., @@ -2505,7 +2594,7 @@ def get_multi_tau_lag_steps(fra_max, num_bufs=8): """ Get taus in log steps ( a multi-taus defined taus ) for a time series with max frame number as fra_max Parameters: - fra_max: integer, the maximun frame number + fra_max: integer, the maximum frame number buf_num (default=8), Return: taus_in_log, a list @@ -2519,12 +2608,14 @@ def get_multi_tau_lag_steps(fra_max, num_bufs=8): return lag_steps[lag_steps < fra_max] -def get_series_g2_taus(fra_max_list, acq_time=1, max_fra_num=None, 
log_taus=True, num_bufs=8): +def get_series_g2_taus( + fra_max_list, acq_time=1, max_fra_num=None, log_taus=True, num_bufs=8 +): """ Get taus for dose dependent analysis Parameters: fra_max_list: a list, a lsit of largest available frame number - acq_time: acquistion time for each frame + acq_time: acquisition time for each frame log_taus: if true, will use the multi-tau defined taus bu using buf_num (default=8), otherwise, use deltau =1 Return: @@ -2547,8 +2638,8 @@ def get_series_g2_taus(fra_max_list, acq_time=1, max_fra_num=None, log_taus=True if n > L: warnings.warn( "Warning: the dose value is too large, and please" - "check the maxium dose in this data set and give a smaller dose value." - "We will use the maxium dose of the data." + "check the maximum dose in this data set and give a smaller dose value." + "We will use the maximum dose of the data." ) n = L if log_taus: @@ -2559,11 +2650,13 @@ def get_series_g2_taus(fra_max_list, acq_time=1, max_fra_num=None, log_taus=True return tausd -def check_lost_metadata(md, Nimg=None, inc_x0=None, inc_y0=None, pixelsize=7.5 * 10 * (-5)): +def check_lost_metadata( + md, Nimg=None, inc_x0=None, inc_y0=None, pixelsize=7.5 * 10 * (-5) +): """Y.G. Dec 31, 2016, check lost metadata Parameter: - md: dict, meta data dictionay + md: dict, meta data dictionary Nimg: number of frames for this uid metadata inc_x0/y0: incident beam center x0/y0, if None, will over-write the md['beam_center_x/y'] pixelsize: if md don't have ['x_pixel_size'], the pixelsize will add it @@ -2611,10 +2704,16 @@ def check_lost_metadata(md, Nimg=None, inc_x0=None, inc_y0=None, pixelsize=7.5 * timeperframe = acquisition_period if inc_x0 is not None: mdn["beam_center_x"] = inc_y0 - print("Beam_center_x has been changed to %s. (no change in raw metadata): " % inc_y0) + print( + "Beam_center_x has been changed to %s. 
(no change in raw metadata): " + % inc_y0 + ) if inc_y0 is not None: mdn["beam_center_y"] = inc_x0 - print("Beam_center_y has been changed to %s. (no change in raw metadata): " % inc_x0) + print( + "Beam_center_y has been changed to %s. (no change in raw metadata): " + % inc_x0 + ) center = [ int(mdn["beam_center_x"]), int(mdn["beam_center_y"]), @@ -2765,11 +2864,11 @@ def check_bad_uids(uids, mask, img_choice_N=10, bad_uids_index=None): bad_uids_index: a list of known bad uid list, default is None Return: guids: list, good uids - buids, list, bad uids + builds, list, bad uids """ import random - buids = [] + builds = [] guids = list(uids) # print( guids ) if bad_uids_index is None: @@ -2783,20 +2882,23 @@ def check_bad_uids(uids, mask, img_choice_N=10, bad_uids_index=None): imgsa = apply_mask(imgs, mask) avg_img = get_avg_img(imgsa, img_samp_index, plot_=False, uid=uid) if avg_img.max() == 0: - buids.append(uid) + builds.append(uid) guids.pop(list(np.where(np.array(guids) == uid)[0])[0]) print("The bad uid is: %s" % uid) else: guids.pop(list(np.where(np.array(guids) == uid)[0])[0]) - buids.append(uid) + builds.append(uid) print("The bad uid is: %s" % uid) - print("The total and bad uids number are %s and %s, repsectively." % (len(uids), len(buids))) - return guids, buids + print( + "The total and bad uids number are %s and %s, respectively." + % (len(uids), len(builds)) + ) + return guids, builds def find_uids(start_time, stop_time): """Y.G. 
Dec 22, 2016 - A wrap funciton to find uids by giving start and end time + A wrap function to find uids by giving start and end time Return: sids: list, scan id uids: list, uid with 8 character length @@ -3022,7 +3124,9 @@ def get_bad_frame_list( fp = path + "%s" % (uid) + "_imgsum_analysis" + ".png" plt.savefig(fp, dpi=fig.dpi) - bd2 = list(np.where(np.abs(data - data.mean()) > scale * data.std())[0] + good_start) + bd2 = list( + np.where(np.abs(data - data.mean()) > scale * data.std())[0] + good_start + ) if return_ylim: return np.array(bd1 + bd2 + bd3), ymin, ymax @@ -3097,7 +3201,7 @@ def get_meta_data(uid, default_dec="eiger", *argv, **kwargs): kwargs: overwrite the meta data, for example get_meta_data( uid = uid, sample = 'test') --> will overwrtie the meta's sample to test return: - meta data of the uid: a dictionay + meta data of the uid: a dictionary with keys: detector suid: the simple given uid @@ -3161,10 +3265,16 @@ def get_meta_data(uid, default_dec="eiger", *argv, **kwargs): md.update(header.start.items()) # print(header.start.time) - md["start_time"] = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(header.start["time"])) - md["stop_time"] = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(header.stop["time"])) + md["start_time"] = time.strftime( + "%Y-%m-%d %H:%M:%S", time.localtime(header.start["time"]) + ) + md["stop_time"] = time.strftime( + "%Y-%m-%d %H:%M:%S", time.localtime(header.stop["time"]) + ) try: # added: try to handle runs that don't contain image data - md["img_shape"] = header["descriptors"][0]["data_keys"][md["detector"]]["shape"][:2][::-1] + md["img_shape"] = header["descriptors"][0]["data_keys"][md["detector"]][ + "shape" + ][:2][::-1] except: if verbose: print("couldn't find image shape...skip!") @@ -3218,7 +3328,9 @@ def get_max_countc(FD, labeled_array): ) max_inten = 0 - for i in tqdm(range(FD.beg, FD.end, 1), desc="Get max intensity of ROIs in all frames"): + for i in tqdm( + range(FD.beg, FD.end, 1), desc="Get max 
intensity of ROIs in all frames" + ): try: (p, v) = FD.rdrawframe(i) w = np.where(timg[p])[0] @@ -3241,7 +3353,7 @@ def create_polygon_mask(image, xcorners, ycorners): """ - from skimage.draw import disk, line, line_aa, polygon + from skimage.draw import polygon imy, imx = image.shape bst_mask = np.zeros_like(image, dtype=bool) @@ -3264,7 +3376,7 @@ def create_rectangle_mask(image, xcorners, ycorners): """ - from skimage.draw import disk, line, line_aa, polygon + from skimage.draw import polygon imy, imx = image.shape bst_mask = np.zeros_like(image, dtype=bool) @@ -3274,7 +3386,9 @@ def create_rectangle_mask(image, xcorners, ycorners): return bst_mask -def create_multi_rotated_rectangle_mask(image, center=None, length=100, width=50, angles=[0]): +def create_multi_rotated_rectangle_mask( + image, center=None, length=100, width=50, angles=[0] +): """Developed at July 10, 2017 by Y.G.@CHX, NSLS2 Create multi rectangle-shaped mask by rotating a rectangle with a list of angles The original rectangle is defined by four corners, i.e., @@ -3316,7 +3430,9 @@ def create_multi_rotated_rectangle_mask(image, center=None, length=100, width=50 mask[rr, cc] = 1 mask_rot = np.zeros(image.shape, dtype=bool) for angle in angles: - mask_rot += np.array(rotate(mask, angle, center=center), dtype=bool) # , preserve_range=True) + mask_rot += np.array( + rotate(mask, angle, center=center), dtype=bool + ) # , preserve_range=True) return ~mask_rot @@ -3326,7 +3442,7 @@ def create_wedge(image, center, radius, wcors, acute_angle=True): wcors: [ [x1,x2,x3...], [y1,y2,y3..] 
""" - from skimage.draw import disk, line, line_aa, polygon + from skimage.draw import disk, polygon imy, imx = image.shape cy, cx = center @@ -3362,7 +3478,7 @@ def create_cross_mask( """ Give image and the beam center to create a cross-shaped mask wy_left: the width of left h-line - wy_right: the width of rigth h-line + wy_right: the width of right h-line wx_up: the width of up v-line wx_down: the width of down v-line center_disk: if True, create a disk with center and center_radius @@ -3370,7 +3486,7 @@ def create_cross_mask( Return: the cross mask """ - from skimage.draw import disk, line, line_aa, polygon + from skimage.draw import disk, polygon imy, imx = image.shape cx, cy = center @@ -3512,7 +3628,7 @@ def get_full_data_path(uid): def get_sid_filenames(header): """YG. Dev Jan, 2016 - Get a bluesky scan_id, unique_id, filename by giveing uid + Get a bluesky scan_id, unique_id, filename by giving uid Parameters ---------- @@ -3522,9 +3638,9 @@ def get_sid_filenames(header): ------- scan_id: integer unique_id: string, a full string of a uid - filename: sring + filename: string - Usuage: + Usage: sid,uid, filenames = get_sid_filenames(db[uid]) """ @@ -3539,8 +3655,10 @@ def get_sid_filenames(header): return header.start["scan_id"], header.start["uid"], filepaths -def load_data(uid, detector="eiger4m_single_image", fill=True, reverse=False, rot90=False): - """load bluesky scan data by giveing uid and detector +def load_data( + uid, detector="eiger4m_single_image", fill=True, reverse=False, rot90=False +): + """load bluesky scan data by giving uid and detector Parameters ---------- @@ -3554,7 +3672,7 @@ def load_data(uid, detector="eiger4m_single_image", fill=True, reverse=False, ro image data: a pims frames series if not success read the uid, will return image data as 0 - Usuage: + Usage: imgs = load_data( uid, detector ) md = imgs.md """ @@ -3599,7 +3717,7 @@ def load_data(uid, detector="eiger4m_single_image", fill=True, reverse=False, ro def 
mask_badpixels(mask, detector): """ - Mask known bad pixel from the giveing mask + Mask known bad pixel from the giving mask """ if detector == "eiger1m_single_image": @@ -3624,7 +3742,7 @@ def mask_badpixels(mask, detector): def load_data2(uid, detector="eiger4m_single_image"): - """load bluesky scan data by giveing uid and detector + """load bluesky scan data by giving uid and detector Parameters ---------- @@ -3636,7 +3754,7 @@ def load_data2(uid, detector="eiger4m_single_image"): image data: a pims frames series if not success read the uid, will return image data as 0 - Usuage: + Usage: imgs = load_data( uid, detector ) md = imgs.md """ @@ -3652,7 +3770,7 @@ def load_data2(uid, detector="eiger4m_single_image"): if flag: print("Can't Load Data!") - uid = "00000" # in case of failling load data + uid = "00000" # in case of failing load data imgs = 0 else: imgs = ev["data"][detector] @@ -3686,7 +3804,9 @@ def pload_obj(filename): return pickle.load(f) -def load_mask(path, mask_name, plot_=False, reverse=False, rot90=False, *argv, **kwargs): +def load_mask( + path, mask_name, plot_=False, reverse=False, rot90=False, *argv, **kwargs +): """load a mask file the mask is a numpy binary file (.npy) @@ -3694,14 +3814,14 @@ def load_mask(path, mask_name, plot_=False, reverse=False, rot90=False, *argv, * ---------- path: the path of the mask file mask_name: the name of the mask file - plot_: a boolen type + plot_: a boolean type reverse: if True, reverse the image upside down to match the "real" image geometry (should always be True in the future) Returns ------- mask: array if plot_ =True, will show the mask - Usuage: + Usage: mask = load_mask( path, mask_name, plot_ = True ) """ @@ -3716,7 +3836,9 @@ def load_mask(path, mask_name, plot_=False, reverse=False, rot90=False, *argv, * return mask -def create_hot_pixel_mask(img, threshold, center=None, center_radius=300, outer_radius=0): +def create_hot_pixel_mask( + img, threshold, center=None, center_radius=300, 
outer_radius=0 +): """create a hot pixel mask by giving threshold Input: img: the image to create hot pixel mask @@ -3898,7 +4020,9 @@ def show_img( extent=extent, ) if label_array is not None: - im2 = show_label_array(ax, label_array, alpha=alpha, cmap=cmap, interpolation=interpolation) + im2 = show_label_array( + ax, label_array, alpha=alpha, cmap=cmap, interpolation=interpolation + ) ax.set_title(image_name) if xlim is not None: @@ -3929,7 +4053,9 @@ def show_img( ax.set_aspect(aspect="auto") if show_colorbar: - cbar = fig.colorbar(im, extend="neither", spacing="proportional", orientation="vertical") + cbar = fig.colorbar( + im, extend="neither", spacing="proportional", orientation="vertical" + ) cbar.ax.tick_params(labelsize=colorbar_fontsize) fig.set_tight_layout(tight) if save: @@ -3976,7 +4102,7 @@ def plot1D( ---------- y: column-y x: column-x, by default x=None, the plot will use index of y as x-axis - the other paramaters are defined same as plt.plot + the other parameters are defined same as plt.plot Returns ------- None @@ -4092,7 +4218,9 @@ def plot1D( ### -def check_shutter_open(data_series, min_inten=0, time_edge=[0, 10], plot_=False, *argv, **kwargs): +def check_shutter_open( + data_series, min_inten=0, time_edge=[0, 10], plot_=False, *argv, **kwargs +): """Check the first frame with shutter open Parameters @@ -4104,11 +4232,13 @@ def check_shutter_open(data_series, min_inten=0, time_edge=[0, 10], plot_=False, return: shutter_open_frame: a integer, the first frame number with open shutter - Usuage: + Usage: good_start = check_shutter_open( imgsa, min_inten=5, time_edge = [0,20], plot_ = False ) """ - imgsum = np.array([np.sum(img) for img in data_series[time_edge[0] : time_edge[1] : 1]]) + imgsum = np.array( + [np.sum(img) for img in data_series[time_edge[0] : time_edge[1] : 1]] + ) if plot_: fig, ax = plt.subplots() ax.plot(imgsum, "bo") @@ -4133,13 +4263,15 @@ def get_each_frame_intensity( """Get the total intensity of each frame by sampling every 
N frames Also get bad_frame_list by check whether above bad_pixel_threshold - Usuage: + Usage: imgsum, bad_frame_list = get_each_frame_intensity(good_series ,sampling = 1000, bad_pixel_threshold=1e10, plot_ = True) """ # print ( argv, kwargs ) - imgsum = np.array([np.sum(img) for img in tqdm(data_series[::sampling], leave=True)]) + imgsum = np.array( + [np.sum(img) for img in tqdm(data_series[::sampling], leave=True)] + ) if plot_: uid = "uid" if "uid" in kwargs.keys(): @@ -4203,7 +4335,9 @@ def create_time_slice(N, slice_num, slice_width, edges=None): return np.array(time_edge) -def show_label_array(ax, label_array, cmap=None, aspect=None, interpolation="nearest", **kwargs): +def show_label_array( + ax, label_array, cmap=None, aspect=None, interpolation="nearest", **kwargs +): """ YG. Sep 26, 2017 Modified show_label_array(ax, label_array, cmap=None, **kwargs) @@ -4231,7 +4365,9 @@ def show_label_array(ax, label_array, cmap=None, aspect=None, interpolation="nea _cmap = copy.copy((mcm.get_cmap(cmap))) _cmap.set_under("w", 0) vmin = max(0.5, kwargs.pop("vmin", 0.5)) - im = ax.imshow(label_array, cmap=cmap, interpolation=interpolation, vmin=vmin, **kwargs) + im = ax.imshow( + label_array, cmap=cmap, interpolation=interpolation, vmin=vmin, **kwargs + ) if aspect is None: ax.set_aspect(aspect="auto") # ax.set_aspect('equal') @@ -4457,7 +4593,7 @@ def show_ROI_on_image( def crop_image(image, crop_mask): """Crop the non_zeros pixels of an image to a new image""" - from skimage.util import crop, pad + from skimage.util import crop pxlst = np.where(crop_mask.ravel())[0] dims = crop_mask.shape @@ -4509,7 +4645,9 @@ def get_avg_img( if "uid" in kwargs.keys(): uid = kwargs["uid"] - im = ax.imshow(avg_img, cmap="viridis", origin="lower", norm=LogNorm(vmin=0.001, vmax=1e2)) + im = ax.imshow( + avg_img, cmap="viridis", origin="lower", norm=LogNorm(vmin=0.001, vmax=1e2) + ) # ax.set_title("Masked Averaged Image") ax.set_title("uid= %s--Masked Averaged Image" % uid) 
fig.colorbar(im) @@ -4530,7 +4668,9 @@ def get_avg_img( return avg_img -def check_ROI_intensity(avg_img, ring_mask, ring_number=3, save=False, plot=True, *argv, **kwargs): +def check_ROI_intensity( + avg_img, ring_mask, ring_number=3, save=False, plot=True, *argv, **kwargs +): """plot intensity versus pixel of a ring Parameters ---------- @@ -4597,19 +4737,29 @@ def cal_g2( if num_lev is None: num_lev = int(np.log(noframes / (num_buf - 1)) / np.log(2) + 1) + 1 - print("In this g2 calculation, the buf and lev number are: %s--%s--" % (num_buf, num_lev)) + print( + "In this g2 calculation, the buf and lev number are: %s--%s--" + % (num_buf, num_lev) + ) print("%s frames will be processed..." % (noframes)) print("Bad Frames involved!") - g2, lag_steps = corr.multi_tau_auto_corr(num_lev, num_buf, ring_mask, tqdm(new_imgs)) + g2, lag_steps = corr.multi_tau_auto_corr( + num_lev, num_buf, ring_mask, tqdm(new_imgs) + ) print("G2 calculation DONE!") else: if num_lev is None: num_lev = int(np.log(noframes / (num_buf - 1)) / np.log(2) + 1) + 1 - print("In this g2 calculation, the buf and lev number are: %s--%s--" % (num_buf, num_lev)) + print( + "In this g2 calculation, the buf and lev number are: %s--%s--" + % (num_buf, num_lev) + ) print("%s frames will be processed..." % (noframes)) - g2, lag_steps = corr.multi_tau_auto_corr(num_lev, num_buf, ring_mask, tqdm(image_series)) + g2, lag_steps = corr.multi_tau_auto_corr( + num_lev, num_buf, ring_mask, tqdm(image_series) + ) print("G2 calculation DONE!") return g2, lag_steps @@ -4644,13 +4794,12 @@ def trans_data_to_pd(data, label=None, dtype="array"): convert data into pandas.DataFrame Input: data: list or np.array - label: the coloum label of the data + label: the column label of the data dtype: list or array [[NOT WORK or dict (for dict only save the scalar not arrays values)]] Output: a pandas.DataFrame """ # lists a [ list1, list2...] 
all the list have the same length - import sys import pandas as pd from numpy import arange, array @@ -4662,7 +4811,7 @@ def trans_data_to_pd(data, label=None, dtype="array"): data = array(data) N, M = data.shape else: - print("Wrong data type! Now only support 'list' and 'array' tpye") + print("Wrong data type! Now only support 'list' and 'array' type") index = arange(N) if label is None: @@ -4672,7 +4821,9 @@ def trans_data_to_pd(data, label=None, dtype="array"): return df -def save_lists(data, label=None, filename=None, path=None, return_res=False, verbose=False): +def save_lists( + data, label=None, filename=None, path=None, return_res=False, verbose=False +): """ save_lists( data, label=None, filename=None, path=None) @@ -4776,13 +4927,13 @@ def save_arrays( def cal_particle_g2(radius, viscosity, qr, taus, beta=0.2, T=298): """YG Dev Nov 20, 2017@CHX - calculate particle g2 fucntion by giving particle radius, Q , and solution viscosity using a simple + calculate particle g2 function by giving particle radius, Q , and solution viscosity using a simple exponetional model Input: radius: m qr, list, in A-1 visocity: N*s/m^2 (water at 25K = 8.9*10^(-4) ) - T: temperture, in K + T: temperature, in K e.g., for a 250 nm sphere in glycerol/water (90:10) at RT (298K) gives: 1.38064852*10**(-123)*298 / ( 6*np.pi * 0.20871 * 250 *10**(-9)) * 10**20 /1e5 = 4.18*10**5 A2/s taus: time @@ -4795,7 +4946,9 @@ def cal_particle_g2(radius, viscosity, qr, taus, beta=0.2, T=298): g2_q1 = np.zeros(len(qr), dtype=object) for i, q1 in enumerate(qr): relaxation_rate = D0 * q1**2 - g2_q1[i] = simple_exponential(taus, beta=beta, relaxation_rate=relaxation_rate, baseline=1) + g2_q1[i] = simple_exponential( + taus, beta=beta, relaxation_rate=relaxation_rate, baseline=1 + ) return g2_q1 @@ -4915,7 +5068,7 @@ def ring_edges(inner_radius, width, spacing=0, num_rings=None): spacing_is_list = isinstance(spacing, collections.Iterable) if width_is_list and spacing_is_list: if len(width) != 
len(spacing) + 1: - raise ValueError("List of spacings must be one less than list " "of widths.") + raise ValueError("List of spacings must be one less than list of widths.") if num_rings is None: try: num_rings = len(width) @@ -4935,7 +5088,7 @@ def ring_edges(inner_radius, width, spacing=0, num_rings=None): if spacing_is_list: if num_rings - 1 != len(spacing): raise ValueError("num_rings does not match spacing list") - # Now regularlize the input. + # Now regularize the input. if not width_is_list: width = np.ones(num_rings) * width @@ -5010,13 +5163,12 @@ def trans_tf_to_td(tf, dtype="dframe"): import datetime import numpy as np - import pandas as pd """translate time.float to time.date, td.type dframe: a dataframe td.type list, a list """ - if dtype is "dframe": + if dtype == "dframe": ind = tf.index else: ind = range(len(tf)) @@ -5037,7 +5189,7 @@ def trans_td_to_tf(td, dtype="dframe"): td.type dframe: a dataframe td.type list, a list """ - if dtype is "dframe": + if dtype == "dframe": ind = td.index else: ind = range(len(td)) @@ -5121,7 +5273,9 @@ def get_averaged_data_from_multi_res( if D != 3: keystr_average[sk[i] : sk[i + 1]] /= avg_count[sk[i + 1]] else: - keystr_average[sk[i] : sk[i + 1], sk[i] : sk[i + 1], :] /= avg_count[sk[i + 1]] + keystr_average[sk[i] : sk[i + 1], sk[i] : sk[i + 1], :] /= avg_count[ + sk[i + 1] + ] return keystr_average @@ -5160,7 +5314,9 @@ def save_g2_general(g2, taus, qr=None, qz=None, uid="uid", path=None, return_res # filename += '-uid=%s.csv' % (uid) filename1 = os.path.join(path, filename) df.to_csv(filename1) - print("The correlation function is saved in %s with filename as %s" % (path, filename)) + print( + "The correlation function is saved in %s with filename as %s" % (path, filename) + ) if return_res: return df @@ -5179,17 +5335,35 @@ def simple_exponential(x, beta, relaxation_rate, baseline=1): def simple_exponential_with_vibration(x, beta, relaxation_rate, freq, amp, baseline=1): - return beta * (1 + amp * np.cos(2 * 
np.pi * freq * x)) * np.exp(-2 * relaxation_rate * x) + baseline + return ( + beta + * (1 + amp * np.cos(2 * np.pi * freq * x)) + * np.exp(-2 * relaxation_rate * x) + + baseline + ) -def stretched_auto_corr_scat_factor_with_vibration(x, beta, relaxation_rate, alpha, freq, amp, baseline=1): - return beta * (1 + amp * np.cos(2 * np.pi * freq * x)) * np.exp(-2 * (relaxation_rate * x) ** alpha) + baseline +def stretched_auto_corr_scat_factor_with_vibration( + x, beta, relaxation_rate, alpha, freq, amp, baseline=1 +): + return ( + beta + * (1 + amp * np.cos(2 * np.pi * freq * x)) + * np.exp(-2 * (relaxation_rate * x) ** alpha) + + baseline + ) -def flow_para_function_with_vibration(x, beta, relaxation_rate, flow_velocity, freq, amp, baseline=1): +def flow_para_function_with_vibration( + x, beta, relaxation_rate, flow_velocity, freq, amp, baseline=1 +): vibration_part = 1 + amp * np.cos(2 * np.pi * freq * x) Diff_part = np.exp(-2 * relaxation_rate * x) - Flow_part = np.pi**2 / (16 * x * flow_velocity) * abs(erf(np.sqrt(4 / np.pi * 1j * x * flow_velocity))) ** 2 + Flow_part = ( + np.pi**2 + / (16 * x * flow_velocity) + * abs(erf(np.sqrt(4 / np.pi * 1j * x * flow_velocity))) ** 2 + ) return beta * vibration_part * Diff_part * Flow_part + baseline @@ -5197,11 +5371,17 @@ def flow_para_function(x, beta, relaxation_rate, flow_velocity, baseline=1): """flow_velocity: q.v (q vector dot v vector = q*v*cos(angle) )""" Diff_part = np.exp(-2 * relaxation_rate * x) - Flow_part = np.pi**2 / (16 * x * flow_velocity) * abs(erf(np.sqrt(4 / np.pi * 1j * x * flow_velocity))) ** 2 + Flow_part = ( + np.pi**2 + / (16 * x * flow_velocity) + * abs(erf(np.sqrt(4 / np.pi * 1j * x * flow_velocity))) ** 2 + ) return beta * Diff_part * Flow_part + baseline -def flow_para_function_explicitq(x, beta, diffusion, flow_velocity, alpha=1, baseline=1, qr=1, q_ang=0): +def flow_para_function_explicitq( + x, beta, diffusion, flow_velocity, alpha=1, baseline=1, qr=1, q_ang=0 +): """Nov 9, 2017 Basically, 
make q vector to (qr, angle), ###relaxation_rate is actually a diffusion rate flow_velocity: q.v (q vector dot v vector = q*v*cos(angle) ) @@ -5216,7 +5396,14 @@ def flow_para_function_explicitq(x, beta, diffusion, flow_velocity, alpha=1, bas Flow_part = ( np.pi**2 / (16 * x * flow_velocity * qr * abs(np.cos(q_ang))) - * abs(erf(np.sqrt(4 / np.pi * 1j * x * flow_velocity * qr * abs(np.cos(q_ang))))) ** 2 + * abs( + erf( + np.sqrt( + 4 / np.pi * 1j * x * flow_velocity * qr * abs(np.cos(q_ang)) + ) + ) + ) + ** 2 ) else: Flow_part = 1 @@ -5229,12 +5416,18 @@ def get_flow_velocity(average_velocity, shape_factor): return average_velocity * (1 - shape_factor) / (1 + shape_factor) -def stretched_flow_para_function(x, beta, relaxation_rate, alpha, flow_velocity, baseline=1): +def stretched_flow_para_function( + x, beta, relaxation_rate, alpha, flow_velocity, baseline=1 +): """ flow_velocity: q.v (q vector dot v vector = q*v*cos(angle) ) """ Diff_part = np.exp(-2 * (relaxation_rate * x) ** alpha) - Flow_part = np.pi**2 / (16 * x * flow_velocity) * abs(erf(np.sqrt(4 / np.pi * 1j * x * flow_velocity))) ** 2 + Flow_part = ( + np.pi**2 + / (16 * x * flow_velocity) + * abs(erf(np.sqrt(4 / np.pi * 1j * x * flow_velocity))) ** 2 + ) return beta * Diff_part * Flow_part + baseline @@ -5252,10 +5445,14 @@ def get_g2_fit_general_two_steps( i) Using the "function" to fit whole g2 to get baseline and beta (contrast) ii) Then using the obtained baseline and beta to fit g2 in a "second_fit_range" by using simple_exponential function """ - g2_fit_result, taus_fit, g2_fit = get_g2_fit_general(g2, taus, function, sequential_fit, *argv, **kwargs) + g2_fit_result, taus_fit, g2_fit = get_g2_fit_general( + g2, taus, function, sequential_fit, *argv, **kwargs + ) guess_values = {} for k in list(g2_fit_result[0].params.keys()): - guess_values[k] = np.array([g2_fit_result[i].params[k].value for i in range(g2.shape[1])]) + guess_values[k] = np.array( + [g2_fit_result[i].params[k].value for i in 
range(g2.shape[1])] + ) if "guess_limits" in kwargs: guess_limits = kwargs["guess_limits"] @@ -5315,9 +5512,9 @@ def get_g2_fit_general( supported function include: 'simple_exponential' (or 'simple'): fit by a simple exponential function, defined as beta * np.exp(-2 * relaxation_rate * lags) + baseline - 'streched_exponential'(or 'streched'): fit by a streched exponential function, defined as + 'stretched_exponential'(or 'stretched'): fit by a stretched exponential function, defined as beta * ( np.exp( -2 * ( relaxation_rate * tau )**alpha ) + baseline - 'stretched_vibration': fit by a streched exponential function with vibration, defined as + 'stretched_vibration': fit by a stretched exponential function with vibration, defined as beta * (1 + amp*np.cos( 2*np.pi*60* x) )* np.exp(-2 * (relaxation_rate * x)**alpha) + baseline 'flow_para_function' (or flow): fit by a flow function @@ -5329,7 +5526,7 @@ def get_g2_fit_general( beta, relaxation_rate , alpha ,baseline values: a False or True, False for not vary 'guess_values': a dict, for initial value of the fitting para, - the defalut values are + the default values are dict( beta=.1, alpha=1.0, relaxation_rate =0.005, baseline=1.0) 'guess_limits': a dict, for the limits of the fittting para, for example: @@ -5338,7 +5535,7 @@ def get_g2_fit_general( dict( baseline =[0.5, 2.5], alpha=[0, inf] ,beta = [0, 1], relaxation_rate= [0.0,1000] ) Returns ------- - fit resutls: a instance in limfit + fit results: a instance in limfit tau_fit fit_data by the model, it has the q number of g2 @@ -5367,16 +5564,22 @@ def get_g2_fit_general( _vars = [] if function == "simple_exponential" or function == "simple": _vars = np.unique(_vars + ["alpha"]) - mod = Model(stretched_auto_corr_scat_factor) # , independent_vars= list( _vars) ) + mod = Model( + stretched_auto_corr_scat_factor + ) # , independent_vars= list( _vars) ) elif function == "stretched_exponential" or function == "stretched": mod = Model(stretched_auto_corr_scat_factor) # 
, independent_vars= _vars) elif function == "stretched_vibration": - mod = Model(stretched_auto_corr_scat_factor_with_vibration) # , independent_vars= _vars) + mod = Model( + stretched_auto_corr_scat_factor_with_vibration + ) # , independent_vars= _vars) elif function == "flow_para_function" or function == "flow_para": mod = Model(flow_para_function) # , independent_vars= _vars) elif function == "flow_para_function_explicitq" or function == "flow_para_qang": mod = Model(flow_para_function_explicitq) # , independent_vars= _vars) - elif function == "flow_para_function_with_vibration" or function == "flow_vibration": + elif ( + function == "flow_para_function_with_vibration" or function == "flow_vibration" + ): mod = Model(flow_para_function_with_vibration) else: @@ -5397,7 +5600,11 @@ def get_g2_fit_general( for k in list(guess_limits.keys()): mod.set_param_hint(k, min=guess_limits[k][0], max=guess_limits[k][1]) - if function == "flow_para_function" or function == "flow_para" or function == "flow_vibration": + if ( + function == "flow_para_function" + or function == "flow_para" + or function == "flow_vibration" + ): mod.set_param_hint("flow_velocity", min=0) if function == "flow_para_function_explicitq" or function == "flow_para_qang": mod.set_param_hint("flow_velocity", min=0) @@ -5545,7 +5752,9 @@ def get_g2_fit_general( # pars[k].value = _guess_val[k][i] if function == "flow_para_function_explicitq" or function == "flow_para_qang": if qval_dict is None: - print("Please provide qval_dict, a dict with qr and ang (in unit of degrees).") + print( + "Please provide qval_dict, a dict with qr and ang (in unit of degrees)." 
+ ) else: pars = mod.make_params( beta=_beta_, @@ -5722,14 +5931,14 @@ def plot_g2_general( function: 'simple_exponential': fit by a simple exponential function, defined as beta * np.exp(-2 * relaxation_rate * lags) + baseline - 'streched_exponential': fit by a streched exponential function, defined as + 'stretched_exponential': fit by a stretched exponential function, defined as beta * (np.exp(-2 * relaxation_rate * lags))**alpha + baseline geometry: 'saxs': a saxs with Qr partition 'ang_saxs': a saxs with Qr and angular partition 'gi_saxs': gisaxs with Qz, Qr - one_plot: if True, plot all images in one pannel + one_plot: if True, plot all images in one panel kwargs: Returns @@ -5827,12 +6036,16 @@ def plot_g2_general( if geometry == "ang_saxs": title_short = "Angle= %.2f" % (short_ulabel[s_ind]) + r"$^\circ$" elif geometry == "gi_saxs": - title_short = r"$Q_z= $" + "%.4f" % (short_ulabel[s_ind]) + r"$\AA^{-1}$" + title_short = ( + r"$Q_z= $" + "%.4f" % (short_ulabel[s_ind]) + r"$\AA^{-1}$" + ) else: title_short = "" else: # qr if geometry == "ang_saxs" or geometry == "gi_saxs": - title_short = r"$Q_r= $" + "%.5f " % (short_ulabel[s_ind]) + r"$\AA^{-1}$" + title_short = ( + r"$Q_r= $" + "%.5f " % (short_ulabel[s_ind]) + r"$\AA^{-1}$" + ) else: title_short = "" # print(geometry) @@ -5880,22 +6093,30 @@ def plot_g2_general( # ax = fig[fig_subnum].add_subplot(sx,sy, i + 1 - fig_subnum*max_plotnum_fig) fig_subnum = i // max_plotnum_fig # print( i, sx,sy, fig_subnum, max_plotnum_fig, i + 1 - fig_subnum*max_plotnum_fig ) - ax = fig[fig_subnum].add_subplot(sx, sy, i + 1 - fig_subnum * max_plotnum_fig) + ax = fig[fig_subnum].add_subplot( + sx, sy, i + 1 - fig_subnum * max_plotnum_fig + ) ax.set_ylabel(r"$%s$" % ylabel + "(" + r"$\tau$" + ")") ax.set_xlabel(r"$\tau $ $(s)$", fontsize=16) if master_plot == "qz" or master_plot == "angle": if geometry != "gi_waxs": - title_long = r"$Q_r= $" + "%.5f " % (long_label[l_ind]) + r"$\AA^{-1}$" + title_long = ( + r"$Q_r= $" + "%.5f 
" % (long_label[l_ind]) + r"$\AA^{-1}$" + ) else: title_long = r"$Q_r= $" + "%i " % (long_label[l_ind]) # print( title_long,long_label,l_ind ) else: if geometry == "ang_saxs": # title_long = 'Ang= ' + '%.2f'%( long_label[l_ind] ) + r'$^\circ$' + '( %d )'%(l_ind) - title_long = "Ang= " + "%.2f" % (long_label[l_ind]) # + r'$^\circ$' + '( %d )'%(l_ind) + title_long = ( + "Ang= " + "%.2f" % (long_label[l_ind]) + ) # + r'$^\circ$' + '( %d )'%(l_ind) elif geometry == "gi_saxs": - title_long = r"$Q_z= $" + "%.5f " % (long_label[l_ind]) + r"$\AA^{-1}$" + title_long = ( + r"$Q_z= $" + "%.5f " % (long_label[l_ind]) + r"$\AA^{-1}$" + ) else: title_long = "" # print( master_plot ) @@ -5912,7 +6133,9 @@ def plot_g2_general( if qth_interest is not None: # it might have a bug here, todolist!!! lab = sorted(list(qval_dict_.keys())) # print( lab, l_ind) - ax.set_title(title_long + " (%s )" % (lab[l_ind] + 1), y=1.05, fontsize=12) + ax.set_title( + title_long + " (%s )" % (lab[l_ind] + 1), y=1.05, fontsize=12 + ) for ki, k in enumerate(list(g2_dict_.keys())): if ki == 0: c = "b" @@ -5967,7 +6190,9 @@ def plot_g2_general( else: yerr = g2_err_dict[k][nlst][:, l_ind] if g2_labels is None: - ax.errorbar(x, y, yerr=yerr, fmt=m, color=c, markersize=6) + ax.errorbar( + x, y, yerr=yerr, fmt=m, color=c, markersize=6 + ) else: if nlst == 0: ax.errorbar( @@ -5980,7 +6205,9 @@ def plot_g2_general( label=g2_labels[ki], ) else: - ax.errorbar(x, y, yerr=yerr, fmt=m, color=c, markersize=6) + ax.errorbar( + x, y, yerr=yerr, fmt=m, color=c, markersize=6 + ) ax.set_xscale("log", nonposx="clip") if nlst == 0: if l_ind == 0: @@ -6000,7 +6227,9 @@ def plot_g2_general( if g2_labels is None: ax.semilogx(x, y, m, color=c, markersize=6) else: - ax.semilogx(x, y, m, color=c, markersize=6, label=g2_labels[ki]) + ax.semilogx( + x, y, m, color=c, markersize=6, label=g2_labels[ki] + ) else: yerr = g2_err_dict[k][:, l_ind] # print(x.shape, y.shape, yerr.shape) @@ -6040,17 +6269,26 @@ def plot_g2_general( elif 
function == "flow_vibration": rate = result1.best_values["relaxation_rate"] freq = result1.best_values["freq"] - if function == "flow_para_function" or function == "flow_para" or function == "flow_vibration": + if ( + function == "flow_para_function" + or function == "flow_para" + or function == "flow_vibration" + ): rate = result1.best_values["relaxation_rate"] flow = result1.best_values["flow_velocity"] - if function == "flow_para_function_explicitq" or function == "flow_para_qang": + if ( + function == "flow_para_function_explicitq" + or function == "flow_para_qang" + ): diff = result1.best_values["diffusion"] qrr = short_ulabel[s_ind] # print(qrr) rate = diff * qrr**2 flow = result1.best_values["flow_velocity"] if qval_dict_ is None: - print("Please provide qval_dict, a dict with qr and ang (in unit of degrees).") + print( + "Please provide qval_dict, a dict with qr and ang (in unit of degrees)." + ) else: pass @@ -6083,7 +6321,9 @@ def plot_g2_general( txts = r"$baseline$" + r"$ = %.3f$" % (baseline) dt += 0.1 - ax.text(x=x, y=y0 - dt, s=txts, fontsize=fontsize, transform=ax.transAxes) + ax.text( + x=x, y=y0 - dt, s=txts, fontsize=fontsize, transform=ax.transAxes + ) if ( function == "flow_para_function" @@ -6113,7 +6353,9 @@ def plot_g2_general( txts = r"$\beta$" + r"$ = %.3f$" % (beta) dt += 0.1 - ax.text(x=x, y=y0 - dt, s=txts, fontsize=fontsize, transform=ax.transAxes) + ax.text( + x=x, y=y0 - dt, s=txts, fontsize=fontsize, transform=ax.transAxes + ) if "ylim" in kwargs: ax.set_ylim(kwargs["ylim"]) @@ -6132,7 +6374,7 @@ def plot_g2_general( else: fp = path + filename + "_%s_%s" % (mastp, s_ind) - if append_name is not "": + if append_name != "": fp = fp + append_name fps.append(fp + ".png") # if num_long_i <= 16: @@ -6150,7 +6392,7 @@ def plot_g2_general( for fn, f in enumerate(fig): f.set_tight_layout(True) fp = path + filename + "_q_%s_%s" % (fn * 16, (fn + 1) * 16) - if append_name is not "": + if append_name != "": fp = fp + append_name fps.append(fp + 
".png") f.savefig(fp + ".png", dpi=f.dpi) @@ -6159,7 +6401,7 @@ def plot_g2_general( if (num_short != 1) or (num_long_i > 16): outputfile = path + filename + ".png" - if append_name is not "": + if append_name != "": outputfile = path + filename + append_name + "__joint.png" else: outputfile = path + filename + "__joint.png" @@ -6172,7 +6414,9 @@ def power_func(x, D0, power=2): return D0 * x**power -def get_q_rate_fit_general(qval_dict, rate, geometry="saxs", weights=None, *argv, **kwargs): +def get_q_rate_fit_general( + qval_dict, rate, geometry="saxs", weights=None, *argv, **kwargs +): """ Dec 26,2016, Y.G.@CHX @@ -6337,7 +6581,9 @@ def plot_q_rate_fit_general( if show_text: txts = r"$D0: %.3e$" % D0 + r" $A^2$" + r"$s^{-1}$" dy = 0.1 - ax.text(x=0.15, y=0.65 - dy * i, s=txts, fontsize=14, transform=ax.transAxes) + ax.text( + x=0.15, y=0.65 - dy * i, s=txts, fontsize=14, transform=ax.transAxes + ) if Nqz != 1: legend = ax.legend(loc="best") diff --git a/pyCHX/v2/_futurepyCHX/chx_handlers.py b/pyCHX/v2/_futurepyCHX/chx_handlers.py index 998ce9c..3ca8aa1 100644 --- a/pyCHX/v2/_futurepyCHX/chx_handlers.py +++ b/pyCHX/v2/_futurepyCHX/chx_handlers.py @@ -5,7 +5,6 @@ # handler registration and database instantiation should be done # here and only here! 
from databroker import Broker -from databroker.assets.handlers_base import HandlerBase from eiger_io.fs_handler import EigerHandler as EigerHandlerPIMS from eiger_io.fs_handler import EigerImages as EigerImagesPIMS diff --git a/pyCHX/v2/_futurepyCHX/chx_libs.py b/pyCHX/v2/_futurepyCHX/chx_libs.py index aa18797..f1120c7 100644 --- a/pyCHX/v2/_futurepyCHX/chx_libs.py +++ b/pyCHX/v2/_futurepyCHX/chx_libs.py @@ -3,58 +3,25 @@ yuzhang@bnl.gov This module is for the necessary packages for the XPCS analysis """ + ## Import all the required packages for Data Analysis from databroker import Broker -from databroker.assets.path_only_handlers import RawHandler # edit handlers here to switch to PIMS or dask # this does the databroker import # from chxtools.handlers import EigerHandler -from eiger_io.fs_handler import EigerHandler -from IPython.core.magics.display import Javascript -from modest_image import imshow -from skbeam.core.utils import multi_tau_lags -from skimage.draw import disk, ellipse, line, line_aa, polygon db = Broker.named("chx") -import collections -import copy -import getpass import itertools -import os -import pickle -import random -import sys -import time -import warnings -from datetime import datetime -import h5py import matplotlib as mpl -import matplotlib.cm as mcm import matplotlib.pyplot as plt import numpy as np -import pims -import skbeam.core.correlation as corr -import skbeam.core.roi as roi -import skbeam.core.utils as utils # * scikit-beam - data analysis tools for X-ray science # - https://github.com/scikit-beam/scikit-beam # * xray-vision - plotting helper functions for X-ray science # - https://github.com/Nikea/xray-vision -import xray_vision -import xray_vision.mpl_plotting as mpl_plot -from lmfit import Model, Parameter, Parameters, minimize, report_fit -from matplotlib import gridspec -from matplotlib.colors import LogNorm -from matplotlib.figure import Figure -from mpl_toolkits.axes_grid1 import make_axes_locatable -from pandas import 
DataFrame -from PIL import Image -from tqdm import tqdm -from xray_vision.mask.manual_mask import ManualMask -from xray_vision.mpl_plotting import speckle mcolors = itertools.cycle( [ @@ -402,7 +369,9 @@ [1, 0, 0], [0.5, 0.0, 0.0], ] -cmap_jet_extended = mpl.colors.LinearSegmentedColormap.from_list("cmap_jet_extended", color_list_jet_extended) +cmap_jet_extended = mpl.colors.LinearSegmentedColormap.from_list( + "cmap_jet_extended", color_list_jet_extended +) # Tweaked version of "view.gtk" default color scale color_list_vge = [ @@ -425,9 +394,11 @@ [254.0 / 255.0, 254.0 / 255.0, 0.0 / 255.0], [254.0 / 255.0, 254.0 / 255.0, 254.0 / 255.0], ] -cmap_vge_hdr = mpl.colors.LinearSegmentedColormap.from_list("cmap_vge_hdr", color_list_vge_hdr) +cmap_vge_hdr = mpl.colors.LinearSegmentedColormap.from_list( + "cmap_vge_hdr", color_list_vge_hdr +) -# Simliar to Dectris ALBULA default color-scale +# Similar to Dectris ALBULA default color-scale color_list_hdr_albula = [ [255.0 / 255.0, 255.0 / 255.0, 255.0 / 255.0], [0.0 / 255.0, 0.0 / 255.0, 0.0 / 255.0], @@ -435,9 +406,13 @@ [255.0 / 255.0, 255.0 / 255.0, 0.0 / 255.0], # [ 255.0/255.0, 255.0/255.0, 255.0/255.0], ] -cmap_hdr_albula = mpl.colors.LinearSegmentedColormap.from_list("cmap_hdr_albula", color_list_hdr_albula) +cmap_hdr_albula = mpl.colors.LinearSegmentedColormap.from_list( + "cmap_hdr_albula", color_list_hdr_albula +) cmap_albula = cmap_hdr_albula -cmap_albula_r = mpl.colors.LinearSegmentedColormap.from_list("cmap_hdr_r", color_list_hdr_albula[::-1]) +cmap_albula_r = mpl.colors.LinearSegmentedColormap.from_list( + "cmap_hdr_r", color_list_hdr_albula[::-1] +) # Ugly color-scale, but good for highlighting many features in HDR data color_list_cur_hdr_goldish = [ @@ -451,4 +426,6 @@ [200.0 / 255.0, 0.0 / 255.0, 0.0 / 255.0], # red [255.0 / 255.0, 255.0 / 255.0, 255.0 / 255.0], # white ] -cmap_hdr_goldish = mpl.colors.LinearSegmentedColormap.from_list("cmap_hdr_goldish", color_list_cur_hdr_goldish) +cmap_hdr_goldish = 
mpl.colors.LinearSegmentedColormap.from_list( + "cmap_hdr_goldish", color_list_cur_hdr_goldish +) diff --git a/pyCHX/v2/_futurepyCHX/chx_olog.py b/pyCHX/v2/_futurepyCHX/chx_olog.py index 880c9f4..8e39c47 100644 --- a/pyCHX/v2/_futurepyCHX/chx_olog.py +++ b/pyCHX/v2/_futurepyCHX/chx_olog.py @@ -110,12 +110,15 @@ def update_olog_id(logid, text, attachments, verbose=True): ) client.updateLog(logid, upd) if verbose: - print(f"The url={url} was successfully updated with {text} and with " f"the attachments") + print( + f"The url={url} was successfully updated with {text} and with " + f"the attachments" + ) def update_olog_uid(uid, text, attachments): """ - Update olog book logid entry cotaining uid string with text and attachments + Update olog book logid entry containing uid string with text and attachments files. Parameters diff --git a/pyCHX/v2/_futurepyCHX/chx_packages.py b/pyCHX/v2/_futurepyCHX/chx_packages.py index c3087c8..e6de659 100644 --- a/pyCHX/v2/_futurepyCHX/chx_packages.py +++ b/pyCHX/v2/_futurepyCHX/chx_packages.py @@ -1,250 +1,8 @@ -import pickle as cpk - -import historydict -from eiger_io.fs_handler import EigerImages -from skimage.draw import line, line_aa, polygon - -from pyCHX.chx_handlers import use_dask, use_pims +from pyCHX.chx_handlers import use_pims from pyCHX.chx_libs import ( - EigerHandler, - Javascript, - LogNorm, - Model, - cmap_albula, - cmap_vge, - datetime, db, - getpass, - h5py, - multi_tau_lags, - np, - os, - pims, - plt, - random, - roi, - time, - tqdm, - utils, - warnings, ) -use_pims(db) # use pims for importing eiger data, register_handler 'AD_EIGER2' and 'AD_EIGER' - -from pyCHX.chx_compress import ( - MultifileBNLCustom, - combine_binary_files, - create_compress_header, - para_compress_eigerdata, - para_segment_compress_eigerdata, - segment_compress_eigerdata, -) -from pyCHX.chx_compress_analysis import ( - Multifile, - cal_each_ring_mean_intensityc, - cal_waterfallc, - compress_eigerdata, - get_avg_imgc, - 
get_each_frame_intensityc, - get_each_ring_mean_intensityc, - get_time_edge_avg_img, - mean_intensityc, - plot_each_ring_mean_intensityc, - plot_waterfallc, - read_compressed_eigerdata, -) -from pyCHX.chx_correlationc import Get_Pixel_Arrayc, auto_two_Arrayc, cal_g2c, get_pixelist_interp_iq -from pyCHX.chx_correlationp import _one_time_process_errorp, auto_two_Arrayp, cal_g2p, cal_GPF, get_g2_from_ROI_GPF -from pyCHX.chx_crosscor import CrossCorrelator2, run_para_ccorr_sym -from pyCHX.chx_generic_functions import ( - R_2, - apply_mask, - average_array_withNan, - check_bad_uids, - check_lost_metadata, - check_ROI_intensity, - check_shutter_open, - combine_images, - copy_data, - create_cross_mask, - create_fullImg_with_box, - create_hot_pixel_mask, - create_polygon_mask, - create_rectangle_mask, - create_ring_mask, - create_seg_ring, - create_time_slice, - create_user_folder, - delete_data, - extract_data_from_file, - filter_roi_mask, - find_bad_pixels, - find_bad_pixels_FD, - find_good_xpcs_uids, - find_index, - find_uids, - fit_one_peak_curve, - get_averaged_data_from_multi_res, - get_avg_img, - get_bad_frame_list, - get_base_all_filenames, - get_cross_point, - get_current_pipeline_filename, - get_current_pipeline_fullpath, - get_curve_turning_points, - get_detector, - get_detectors, - get_each_frame_intensity, - get_echos, - get_eigerImage_per_file, - get_fit_by_two_linear, - get_fra_num_by_dose, - get_g2_fit_general, - get_image_edge, - get_image_with_roi, - get_img_from_iq, - get_last_uids, - get_mass_center_one_roi, - get_max_countc, - get_meta_data, - get_multi_tau_lag_steps, - get_non_uniform_edges, - get_print_uids, - get_q_rate_fit_general, - get_qval_dict, - get_qval_qwid_dict, - get_roi_mask_qval_qwid_by_shift, - get_roi_nr, - get_series_g2_taus, - get_SG_norm, - get_sid_filenames, - get_today_date, - get_touched_qwidth, - get_waxs_beam_center, - lin2log_g2, - linear_fit, - load_data, - load_mask, - load_pilatus, - ls_dir, - mask_badpixels, - 
mask_exclude_badpixel, - move_beamstop, - pad_length, - pload_obj, - plot1D, - plot_fit_two_linear_fit, - plot_g2_general, - plot_q_g2fitpara_general, - plot_q_rate_fit_general, - plot_q_rate_general, - plot_xy_with_fit, - plot_xy_x2, - print_dict, - psave_obj, - read_dict_csv, - refine_roi_mask, - reverse_updown, - ring_edges, - run_time, - save_array_to_tiff, - save_arrays, - save_current_pipeline, - save_dict_csv, - save_g2_fit_para_tocsv, - save_g2_general, - save_lists, - save_oavs_tifs, - sgolay2d, - shift_mask, - show_img, - show_ROI_on_image, - shrink_image, - trans_data_to_pd, - update_qval_dict, - update_roi_mask, - validate_uid, -) -from pyCHX.chx_olog import Attachment, LogEntry, update_olog_id, update_olog_uid, update_olog_uid_with_file -from pyCHX.chx_specklecp import ( - get_binned_his_std, - get_contrast, - get_his_std_from_pds, - get_xsvs_fit, - plot_g2_contrast, - plot_xsvs_fit, - save_bin_his_std, - save_KM, - xsvsc, - xsvsp, -) -from pyCHX.Create_Report import ( - create_multi_pdf_reports_for_uids, - create_one_pdf_reports_for_uids, - create_pdf_report, - export_xpcs_results_to_h5, - extract_xpcs_results_from_h5, - make_pdf_report, -) -from pyCHX.DataGonio import qphiavg -from pyCHX.SAXS import ( - fit_form_factor, - fit_form_factor2, - form_factor_residuals_bg_lmfit, - form_factor_residuals_lmfit, - get_form_factor_fit_lmfit, - poly_sphere_form_factor_intensity, - show_saxs_qmap, -) -from pyCHX.Two_Time_Correlation_Function import ( - get_aged_g2_from_g12, - get_aged_g2_from_g12q, - get_four_time_from_two_time, - get_one_time_from_two_time, - rotate_g12q_to_rectangle, - show_C12, -) -from pyCHX.XPCS_GiSAXS import ( - cal_1d_qr, - convert_gisaxs_pixel_to_q, - fit_qr_qz_rate, - get_1d_qr, - get_each_box_mean_intensity, - get_gisaxs_roi, - get_qedge, - get_qmap_label, - get_qr_tick_label, - get_qzr_map, - get_qzrmap, - get_reflected_angles, - get_t_qrc, - multi_uids_gisaxs_xpcs_analysis, - plot_gisaxs_g4, - plot_gisaxs_two_g2, - 
plot_qr_1d_with_ROI, - plot_qrt_pds, - plot_qzr_map, - plot_t_qrc, - show_qzr_map, - show_qzr_roi, -) -from pyCHX.XPCS_SAXS import ( - cal_g2, - combine_two_roi_mask, - create_hot_pixel_mask, - get_angular_mask, - get_circular_average, - get_cirucular_average_std, - get_each_ring_mean_intensity, - get_QrQw_From_RoiMask, - get_ring_mask, - get_seg_from_ring_mask, - get_t_iq, - get_t_iqc, - multi_uids_saxs_xpcs_analysis, - plot_circular_average, - plot_qIq_with_ROI, - plot_t_iqc, - recover_img_from_iq, - save_lists, -) +use_pims( + db +) # use pims for importing eiger data, register_handler 'AD_EIGER2' and 'AD_EIGER' diff --git a/pyCHX/v2/_futurepyCHX/chx_speckle.py b/pyCHX/v2/_futurepyCHX/chx_speckle.py index 75ab068..134913b 100644 --- a/pyCHX/v2/_futurepyCHX/chx_speckle.py +++ b/pyCHX/v2/_futurepyCHX/chx_speckle.py @@ -10,7 +10,6 @@ import logging import time -import six from skbeam.core import roi from skbeam.core.utils import bin_edges_to_centers, geometric_series @@ -19,13 +18,10 @@ import sys from datetime import datetime -import matplotlib as mpl import matplotlib.pyplot as plt import numpy as np -import scipy as sp import scipy.stats as st -from matplotlib.colors import LogNorm -from scipy.optimize import leastsq, minimize +from scipy.optimize import leastsq def xsvs( @@ -84,8 +80,8 @@ def xsvs( C. Carona and A. Fluerasu , "Photon statistics and speckle visibility spectroscopy with partially coherent x-rays" J. Synchrotron Rad., vol 21, p 1288-1295, 2014. - .. [2] R. Bandyopadhyay, A. S. Gittings, S. S. Suh, P.K. Dixon and - D.J. Durian "Speckle-visibilty Spectroscopy: A tool to study + .. [2] R. Bandyopadhyay, A. S. Gittings, S. S. Suh, P.K. Dixon and + D.J. Durian "Speckle-visibility Spectroscopy: A tool to study time-varying dynamics" Rev. Sci. Instrum. vol 76, p 093110, 2005. 
There is an example in https://github.com/scikit-xray/scikit-xray-examples It will demonstrate the use of these functions in this module for @@ -266,7 +262,7 @@ def xsvs( prob_k_all[i, j] = np.array([0] * (len(bin_edges[i]) - 1)) prob_k_std_dev[i, j] = np.array([0] * (len(bin_edges[i]) - 1)) - logger.info("Processing time for XSVS took %s seconds." "", (time.time() - start_time)) + logger.info("Processing time for XSVS took %s seconds.", (time.time() - start_time)) elapsed_time = time.time() - start_time # print (Num) print("Total time: %.2f min" % (elapsed_time / 60.0)) @@ -332,7 +328,9 @@ def _process( roi_data = data[labels == label] spe_hist, bin_edges = np.histogram(roi_data, bins=bin_edges, density=True) spe_hist = np.nan_to_num(spe_hist) - prob_k[level, j] += (spe_hist - prob_k[level, j]) / (img_per_level[level] - track_bad_level[level]) + prob_k[level, j] += (spe_hist - prob_k[level, j]) / ( + img_per_level[level] - track_bad_level[level] + ) prob_k_pow[level, j] += (np.power(spe_hist, 2) - prob_k_pow[level, j]) / ( img_per_level[level] - track_bad_level[level] @@ -417,7 +415,6 @@ def get_bin_edges(num_times, num_rois, mean_roi, max_cts): ##for fit ################### -from scipy import stats from scipy.special import gamma, gammaln @@ -425,8 +422,8 @@ def gammaDist(x, params): """Gamma distribution function M,K = params, where K is average photon counts , M is the number of coherent modes, - In case of high intensity, the beam behavors like wave and - the probability density of photon, P(x), satify this gamma function. + In case of high intensity, the beam behaviors like wave and + the probability density of photon, P(x), satisfy this gamma function. """ K, M = params @@ -509,8 +506,8 @@ def nbinom_dist(bin_values, K, M): def poisson(x, K): """Poisson distribution function. K is average photon counts - In case of low intensity, the beam behavors like particle and - the probability density of photon, P(x), satify this poisson function. 
+ In case of low intensity, the beam behaviors like particle and + the probability density of photon, P(x), satisfy this poisson function. """ K = float(K) Pk = np.exp(-K) * power(K, x) / gamma(x + 1) @@ -576,9 +573,9 @@ def diff_mot_con_factor(times, relaxation_rate, contrast_factor, cf_baseline=0): negative_binom_distribution() function Notes """ - co_eff = (np.exp(-2 * relaxation_rate * times) - 1 + 2 * relaxation_rate * times) / ( - 2 * (relaxation_rate * times) ** 2 - ) + co_eff = ( + np.exp(-2 * relaxation_rate * times) - 1 + 2 * relaxation_rate * times + ) / (2 * (relaxation_rate * times) ** 2) return contrast_factor * co_eff + cf_baseline @@ -600,7 +597,7 @@ def plot_sxvs( xlim=[0, 3.5], time_steps=None, ): - """a convinent function to plot sxvs results""" + """a convenient function to plot sxvs results""" num_rings = spe_cts_all.shape[1] num_times = Knorm_bin_edges.shape[0] sx = int(round(np.sqrt(num_rings))) @@ -646,7 +643,7 @@ def fit_xsvs1( ylim=None, time_steps=None, ): - """a convinent function to plot sxvs results + """a convenient function to plot sxvs results supporting fit function include: 'bn': Negative Binomaial Distribution 'gm': Gamma Distribution @@ -654,18 +651,17 @@ def fit_xsvs1( """ from lmfit import Model - from scipy.interpolate import UnivariateSpline if func == "bn": mod = Model(nbinom_dist) elif func == "gm": - mod = Model(gamma_dist, indepdent_vars=["K"]) + mod = Model(gamma_dist, independent_vars=["K"]) elif func == "ps": mod = Model(poisson_dist) else: print("the current supporting function include 'bn', 'gm','ps'") - # g_mod = Model(gamma_dist, indepdent_vars=['K']) + # g_mod = Model(gamma_dist, independent_vars=['K']) # g_mod = Model( gamma_dist ) # n_mod = Model(nbinom_dist) # p_mod = Model(poisson_dist) @@ -745,9 +741,13 @@ def fit_xsvs1( fitx_ = np.linspace(0, max(Knorm_bin_edges[j, i][:-1]), 1000) fitx = np.linspace(0, max(bin_edges[j, i][:-1]), 1000) if func == "bn": - fity = nbinom_dist(fitx, K_val[i][j], M_val[i][j]) # M 
and K are fitted best values + fity = nbinom_dist( + fitx, K_val[i][j], M_val[i][j] + ) # M and K are fitted best values label = "nbinom" - txt = "K=" + "%.3f" % (K_val[i][0]) + "," + "M=" + "%.3f" % (M_val[i][0]) + txt = ( + "K=" + "%.3f" % (K_val[i][0]) + "," + "M=" + "%.3f" % (M_val[i][0]) + ) elif func == "gm": fity = gamma_dist(fitx, K_mean[i] * 2**j, M_val[i][j]) label = "gamma" @@ -1154,7 +1154,9 @@ def get_max_countc(FD, labeled_array): ) max_inten = 0 - for i in tqdm(range(FD.beg, FD.end, 1), desc="Get max intensity of ROIs in all frames"): + for i in tqdm( + range(FD.beg, FD.end, 1), desc="Get max intensity of ROIs in all frames" + ): (p, v) = FD.rdrawframe(i) w = np.where(timg[p])[0] @@ -1197,7 +1199,9 @@ def plot_g2_contrast( # fig = plt.figure(figsize=(14, 10)) fig = plt.figure() - plt.title("uid= %s_" % uid + "Contrast Factor for Each Q Rings", fontsize=14, y=1.08) + plt.title( + "uid= %s_" % uid + "Contrast Factor for Each Q Rings", fontsize=14, y=1.08 + ) if qth is None: plt.axis("off") n = 1 diff --git a/pyCHX/v2/_futurepyCHX/chx_specklecp.py b/pyCHX/v2/_futurepyCHX/chx_specklecp.py index a4e5029..324e5bf 100644 --- a/pyCHX/v2/_futurepyCHX/chx_specklecp.py +++ b/pyCHX/v2/_futurepyCHX/chx_specklecp.py @@ -8,31 +8,23 @@ from __future__ import absolute_import, division, print_function import logging -import time -import six from skbeam.core import roi from skbeam.core.utils import bin_edges_to_centers, geometric_series logger = logging.getLogger(__name__) -import itertools import os -import sys from datetime import datetime from multiprocessing import Pool -import dill -import matplotlib as mpl import matplotlib.pyplot as plt import numpy as np -import scipy as sp import scipy.stats as st -from matplotlib.colors import LogNorm -from scipy.optimize import leastsq, minimize +from scipy.optimize import leastsq from tqdm import tqdm -from pyCHX.chx_compress import apply_async, go_through_FD, map_async, pass_FD, run_dill_encoded +from pyCHX.chx_compress 
import apply_async, pass_FD from pyCHX.chx_generic_functions import trans_data_to_pd @@ -151,14 +143,18 @@ def xsvsp_single( number_of_img = noframes for i in range(FD.beg, FD.end): pass_FD(FD, i) - label_arrays = [np.array(label_array == i, dtype=np.int64) for i in np.unique(label_array)[1:]] + label_arrays = [ + np.array(label_array == i, dtype=np.int64) for i in np.unique(label_array)[1:] + ] qind, pixelist = roi.extract_label_indices(label_array) if norm is not None: norms = [ norm[ np.in1d( pixelist, - extract_label_indices(np.array(label_array == i, dtype=np.int64))[1], + extract_label_indices(np.array(label_array == i, dtype=np.int64))[ + 1 + ], ) ] for i in np.unique(label_array)[1:] @@ -354,7 +350,7 @@ def xsvsc_single( norm=None, progress_bar=True, ): - """YG MOD@Octo 12, 2017, Change photon statistic error bar from sampling statistic bar to error bar with phisical meaning, + """YG MOD@Octo 12, 2017, Change photon statistic error bar from sampling statistic bar to error bar with physical meaning, photon_number@one_particular_count = photon_tolal_number * photon_distribution@one_particular_count +/- sqrt( photon_number@one_particular_count ) @@ -403,8 +399,8 @@ def xsvsc_single( C. Carona and A. Fluerasu , "Photon statistics and speckle visibility spectroscopy with partially coherent x-rays" J. Synchrotron Rad., vol 21, p 1288-1295, 2014. - .. [2] R. Bandyopadhyay, A. S. Gittings, S. S. Suh, P.K. Dixon and - D.J. Durian "Speckle-visibilty Spectroscopy: A tool to study + .. [2] R. Bandyopadhyay, A. S. Gittings, S. S. Suh, P.K. Dixon and + D.J. Durian "Speckle-visibility Spectroscopy: A tool to study time-varying dynamics" Rev. Sci. Instrum. vol 76, p 093110, 2005. 
There is an example in https://github.com/scikit-xray/scikit-xray-examples It will demonstrate the use of these functions in this module for @@ -696,7 +692,10 @@ def get_his_std_qi(data_pixel_qi, max_cts=None): bins = np.arange(max_cts) dqn, dqm = data_pixel_qi.shape # get histogram here - H = np.apply_along_axis(np.bincount, 1, np.int_(data_pixel_qi), minlength=max_cts) / dqm + H = ( + np.apply_along_axis(np.bincount, 1, np.int_(data_pixel_qi), minlength=max_cts) + / dqm + ) # do average for different frame his = np.average(H, axis=0) std = np.std(H, axis=0) @@ -727,7 +726,9 @@ def get_his_std(data_pixel, rois, max_cts=None): for qi in range(noqs): pixelist_qi = np.where(qind == qi + 1)[0] # print(qi, max_cts) - bins, his[qi], std[qi], kmean[qi] = get_his_std_qi(data_pixel[:, pixelist_qi], max_cts) + bins, his[qi], std[qi], kmean[qi] = get_his_std_qi( + data_pixel[:, pixelist_qi], max_cts + ) return bins, his, std, kmean @@ -801,7 +802,9 @@ def get_binned_his_std_qi(data_pixel_qi, lag_steps, max_cts=None): i = 0 for lag in lag_steps: data_pixel_qi_ = np.sum(reshape_array(data_pixel_qi, lag), axis=1) - bins[i], his[i], std[i], kmean[i] = get_his_std_qi(data_pixel_qi_, max_cts * lag) + bins[i], his[i], std[i], kmean[i] = get_his_std_qi( + data_pixel_qi_, max_cts * lag + ) i += 1 return bins, his, std, kmean @@ -890,7 +893,6 @@ def get_bin_edges(num_times, num_rois, mean_roi, max_cts): ##for fit ################### -from scipy import stats from scipy.special import gamma, gammaln ###########################3 @@ -961,9 +963,9 @@ def nbinomlog1(p, hist, x, N, mu): Vary M (shape param) but mu (count rate) fixed (using leastsq) p: fitting parameter, in this case is M, coherent mode number - hist: histogram of photon count for each bin (is a number not probablity) + hist: histogram of photon count for each bin (is a number not probability) x: photon count - N: total photons count in the statistics, ( probablity = hist / N ) + N: total photons count in the statistics, ( 
probability = hist / N ) mu: average photon count for each bin """ @@ -1216,7 +1218,9 @@ def plot_xsvs_fit( Knorm_bin_edges[j, i][:L], spe_cts_all[j, i], ) - xscale = (x_ / x)[1] # bin_edges[j, i][:-1][1]/ Knorm_bin_edges[j, i][:-1][1] + xscale = (x_ / x)[ + 1 + ] # bin_edges[j, i][:-1][1]/ Knorm_bin_edges[j, i][:-1][1] # print( xscale ) else: max_cts_ = max_cts * lag_steps[j] @@ -1265,7 +1269,11 @@ def plot_xsvs_fit( # if j == 0: if j < 2: label = "nbinom_L" - txts = r"$M=%s$" % round(ML_val[i][j], 2) + "," + r"$K=%s$" % round(KL_val[i][j], 2) + txts = ( + r"$M=%s$" % round(ML_val[i][j], 2) + + "," + + r"$K=%s$" % round(KL_val[i][j], 2) + ) # print( ML_val[i] ) x = 0.05 y0 = 0.2 - j * 0.1 @@ -1273,7 +1281,9 @@ def plot_xsvs_fit( fontsize_ = fontsize * 2 else: fontsize_ = 18 - axes.text(x=x, y=y0, s=txts, fontsize=fontsize_, transform=axes.transAxes) + axes.text( + x=x, y=y0, s=txts, fontsize=fontsize_, transform=axes.transAxes + ) else: label = "" (art,) = axes.plot(fitx_, fitL, "-r", label=label) @@ -1344,7 +1354,9 @@ def save_KM(K_mean, KL_val, ML_val, qs=None, level_time=None, uid=None, path=Non + ["M_Fit_%s" % s for s in level_time] + ["Contrast_Fit_%s" % s for s in level_time] ) - data = np.hstack([(K_mean).T, kl.reshape(L, n), ml.reshape(L, n), (1 / ml).reshape(L, n)]) + data = np.hstack( + [(K_mean).T, kl.reshape(L, n), ml.reshape(L, n), (1 / ml).reshape(L, n)] + ) if qs is not None: qs = np.array(qs) l = ["q"] + l @@ -1388,9 +1400,15 @@ def get_his_std_from_pds(spec_pds, his_shapes=None): spec_std = np.zeros([M, N], dtype=np.object) for i in range(M): for j in range(N): - spec_his[i, j] = np.array(spec_pds[spkeys[1 + i * N + j]][~np.isnan(spec_pds[spkeys[1 + i * N + j]])]) + spec_his[i, j] = np.array( + spec_pds[spkeys[1 + i * N + j]][ + ~np.isnan(spec_pds[spkeys[1 + i * N + j]]) + ] + ) spec_std[i, j] = np.array( - spec_pds[spkeys[1 + 2 * N + i * N + j]][~np.isnan(spec_pds[spkeys[1 + 2 * N + i * N + j]])] + spec_pds[spkeys[1 + 2 * N + i * N + j]][ + 
~np.isnan(spec_pds[spkeys[1 + 2 * N + i * N + j]]) + ] ) return spec_his, spec_std @@ -1567,8 +1585,8 @@ def gammaDist(x, params): """Gamma distribution function M,K = params, where K is average photon counts , M is the number of coherent modes, - In case of high intensity, the beam behavors like wave and - the probability density of photon, P(x), satify this gamma function. + In case of high intensity, the beam behaviors like wave and + the probability density of photon, P(x), satisfy this gamma function. """ K, M = params @@ -1651,8 +1669,8 @@ def nbinom_dist(bin_values, K, M): def poisson(x, K): """Poisson distribution function. K is average photon counts - In case of low intensity, the beam behavors like particle and - the probability density of photon, P(x), satify this poisson function. + In case of low intensity, the beam behaviors like particle and + the probability density of photon, P(x), satisfy this poisson function. """ K = float(K) Pk = np.exp(-K) * power(K, x) / gamma(x + 1) @@ -1718,9 +1736,9 @@ def diff_mot_con_factor(times, relaxation_rate, contrast_factor, cf_baseline=0): negative_binom_distribution() function Notes """ - co_eff = (np.exp(-2 * relaxation_rate * times) - 1 + 2 * relaxation_rate * times) / ( - 2 * (relaxation_rate * times) ** 2 - ) + co_eff = ( + np.exp(-2 * relaxation_rate * times) - 1 + 2 * relaxation_rate * times + ) / (2 * (relaxation_rate * times) ** 2) return contrast_factor * co_eff + cf_baseline @@ -1733,7 +1751,7 @@ def plot_sxvs( xlim=[0, 3.5], time_steps=None, ): - """a convinent function to plot sxvs results""" + """a convenient function to plot sxvs results""" num_rings = spe_cts_all.shape[1] num_times = Knorm_bin_edges.shape[0] sx = int(round(np.sqrt(num_rings))) @@ -1779,7 +1797,7 @@ def fit_xsvs1( ylim=None, time_steps=None, ): - """a convinent function to plot sxvs results + """a convenient function to plot sxvs results supporting fit function include: 'bn': Negative Binomaial Distribution 'gm': Gamma Distribution 
@@ -1787,18 +1805,17 @@ def fit_xsvs1( """ from lmfit import Model - from scipy.interpolate import UnivariateSpline if func == "bn": mod = Model(nbinom_dist) elif func == "gm": - mod = Model(gamma_dist, indepdent_vars=["K"]) + mod = Model(gamma_dist, independent_vars=["K"]) elif func == "ps": mod = Model(poisson_dist) else: print("the current supporting function include 'bn', 'gm','ps'") - # g_mod = Model(gamma_dist, indepdent_vars=['K']) + # g_mod = Model(gamma_dist, independent_vars=['K']) # g_mod = Model( gamma_dist ) # n_mod = Model(nbinom_dist) # p_mod = Model(poisson_dist) @@ -1878,9 +1895,13 @@ def fit_xsvs1( fitx_ = np.linspace(0, max(Knorm_bin_edges[j, i][:-1]), 1000) fitx = np.linspace(0, max(bin_edges[j, i][:-1]), 1000) if func == "bn": - fity = nbinom_dist(fitx, K_val[i][j], M_val[i][j]) # M and K are fitted best values + fity = nbinom_dist( + fitx, K_val[i][j], M_val[i][j] + ) # M and K are fitted best values label = "nbinom" - txt = "K=" + "%.3f" % (K_val[i][0]) + "," + "M=" + "%.3f" % (M_val[i][0]) + txt = ( + "K=" + "%.3f" % (K_val[i][0]) + "," + "M=" + "%.3f" % (M_val[i][0]) + ) elif func == "gm": fity = gamma_dist(fitx, K_mean[i] * 2**j, M_val[i][j]) label = "gamma" diff --git a/pyCHX/v2/_futurepyCHX/chx_xpcs_xsvs_jupyter_V1.py b/pyCHX/v2/_futurepyCHX/chx_xpcs_xsvs_jupyter_V1.py index d133286..499f656 100644 --- a/pyCHX/v2/_futurepyCHX/chx_xpcs_xsvs_jupyter_V1.py +++ b/pyCHX/v2/_futurepyCHX/chx_xpcs_xsvs_jupyter_V1.py @@ -11,7 +11,7 @@ def get_t_iqc_uids(uid_list, setup_pargs, slice_num=10, slice_width=1): """Get Iq at different time edge (difined by slice_num and slice_width) for a list of uids Input: uid_list: list of string (uid) - setup_pargs: dict, for caculation of Iq, the key of this dict should include + setup_pargs: dict, for calculation of Iq, the key of this dict should include 'center': beam center 'dpix': pixel size 'lambda_': X-ray wavelength @@ -36,7 +36,9 @@ def get_t_iqc_uids(uid_list, setup_pargs, slice_num=10, slice_width=1): 
good_start = 5 FD = Multifile(filename, good_start, N) Nimg = FD.end - FD.beg - time_edge = create_time_slice(Nimg, slice_num=slice_num, slice_width=slice_width, edges=None) + time_edge = create_time_slice( + Nimg, slice_num=slice_num, slice_width=slice_width, edges=None + ) time_edge = np.array(time_edge) + good_start # print( time_edge ) tstamp[uid] = time_edge[:, 0] * timeperframe @@ -47,7 +49,7 @@ def get_t_iqc_uids(uid_list, setup_pargs, slice_num=10, slice_width=1): def plot_t_iqtMq2(qt, iqst, tstamp, ax=None, perf=""): - """plot q2~Iq at differnt time""" + """plot q2~Iq at different time""" if ax is None: fig, ax = plt.subplots() q = qt @@ -69,7 +71,7 @@ def plot_t_iqtMq2(qt, iqst, tstamp, ax=None, perf=""): def plot_t_iqc_uids(qs, iqsts, tstamps): - """plot q2~Iq at differnt time for a uid list""" + """plot q2~Iq at different time for a uid list""" keys = list(qs.keys()) fig, ax = plt.subplots() for uid in keys: @@ -99,11 +101,11 @@ def plot_entries_from_csvlist( YG June 9, 2017@CHX YG Sep 29, 2017@CHX. - plot enteries for a list csvs + plot entries for a list csvs Input: csv_list: list, a list of uid (string) inDir: string, imported folder for saved analysis results - key: string, plot entry, surport + key: string, plot entry, support 'g2' for one-time, 'iq' for q~iq 'mean_int_sets' for mean intensity of each roi as a function of frame @@ -245,11 +247,11 @@ def plot_entries_from_uids( YG June 9, 2017@CHX YG Sep 29, 2017@CHX. 
- plot enteries for a list uids + plot entries for a list uids Input: uid_list: list, a list of uid (string) inDir: string, imported folder for saved analysis results - key: string, plot entry, surport + key: string, plot entry, support 'g2' for one-time, 'iq' for q~iq 'mean_int_sets' for mean intensity of each roi as a function of frame @@ -301,7 +303,9 @@ def plot_entries_from_uids( filename = "uid=%s_Res.h5" % uid_dict[u] else: filename = filename_list[i] - total_res = extract_xpcs_results_from_h5(filename=filename, import_dir=inDiru, exclude_keys=["g12b"]) + total_res = extract_xpcs_results_from_h5( + filename=filename, import_dir=inDiru, exclude_keys=["g12b"] + ) if key == "g2": d = total_res[key][1:, qth] taus = total_res["taus"][1:] @@ -381,10 +385,10 @@ def plot_entries_from_uids( def get_iq_from_uids(uids, mask, setup_pargs): """Y.G. developed July 17, 2017 @CHX - Get q-Iq of a uids dict, each uid could corrrespond one frame or a time seriers + Get q-Iq of a uids dict, each uid could correspond one frame or a time seriers uids: dict, val: meaningful decription, key: a list of uids mask: bool-type 2D array - setup_pargs: dict, at least should contains, the following paramters for calculation of I(q) + setup_pargs: dict, at least should contains, the following parameters for calculation of I(q) 'Ldet': 4917.50495, 'center': [988, 1120], @@ -439,7 +443,9 @@ def get_iq_from_uids(uids, mask, setup_pargs): setup_pargs["uid"] = uidstr - qp_saxs, iq_saxs, q_saxs = get_circular_average(avg_img, mask, pargs=setup_pargs, save=True) + qp_saxs, iq_saxs, q_saxs = get_circular_average( + avg_img, mask, pargs=setup_pargs, save=True + ) if n == 0: iqs = np.zeros([len(q_saxs), Nuid + 1]) iqs[:, 0] = q_saxs @@ -477,8 +483,8 @@ def wait_func(wait_time=2): # print( 'Starting to do something here...') -def wait_data_acquistion_finish(uid, wait_time=2, max_try_num=3): - """check the completion of a data uid acquistion +def wait_data_acquisition_finish(uid, wait_time=2, 
max_try_num=3): + """check the completion of a data uid acquisition Parameter: uid: wait_time: the waiting step in unit of second @@ -497,14 +503,14 @@ def wait_data_acquistion_finish(uid, wait_time=2, max_try_num=3): try: get_meta_data(uid) FINISH = True - print("The data acquistion finished.") + print("The data acquisition finished.") print("Starting to do something here...") except: wait_func(wait_time=wait_time) w += 1 print("Try number: %s" % w) if w > max_try_num: - print("There could be something going wrong with data acquistion.") + print("There could be something going wrong with data acquisition.") print("Force to terminate after %s tries." % w) FINISH = True Fake_FINISH = False @@ -514,7 +520,7 @@ def wait_data_acquistion_finish(uid, wait_time=2, max_try_num=3): def get_uids_by_range(start_uidth=-1, end_uidth=0): """Y.G. Dec 22, 2016 - A wrap funciton to find uids by giving start and end uid number, i.e. -10, -1 + A wrap function to find uids by giving start and end uid number, i.e. -10, -1 Return: uids: list, uid with 8 character length fuids: list, uid with full length @@ -537,7 +543,7 @@ def get_uids_by_range(start_uidth=-1, end_uidth=0): def get_uids_in_time_period(start_time, stop_time): """Y.G. Dec 22, 2016 - A wrap funciton to find uids by giving start and end time + A wrap function to find uids by giving start and end time Return: uids: list, uid with 8 character length fuids: list, uid with full length @@ -558,7 +564,9 @@ def get_uids_in_time_period(start_time, stop_time): return np.array(uids), np.array(fuids) -def do_compress_on_line(start_time, stop_time, mask_dict=None, mask=None, wait_time=2, max_try_num=3): +def do_compress_on_line( + start_time, stop_time, mask_dict=None, mask=None, wait_time=2, max_try_num=3 +): """Y.G. 
Mar 10, 2017 Do on-line compress by giving start time and stop time Parameters: @@ -577,7 +585,7 @@ def do_compress_on_line(start_time, stop_time, mask_dict=None, mask=None, wait_t print("*" * 50) print("Do compress for %s now..." % uid) if db[uid]["start"]["plan_name"] == "count": - finish = wait_data_acquistion_finish(uid, wait_time, max_try_num) + finish = wait_data_acquisition_finish(uid, wait_time, max_try_num) if finish: try: md = get_meta_data(uid) @@ -635,13 +643,16 @@ def realtime_xpcs_analysis( print("*" * 50) # print('Do compress for %s now...'%uid) print("Starting analysis for %s now..." % uid) - if db[uid]["start"]["plan_name"] == "count" or db[uid]["start"]["plan_name"] == "manual_count": + if ( + db[uid]["start"]["plan_name"] == "count" + or db[uid]["start"]["plan_name"] == "manual_count" + ): # if db[uid]['start']['dtype'] =='xpcs': - finish = wait_data_acquistion_finish(uid, wait_time, max_try_num) + finish = wait_data_acquisition_finish(uid, wait_time, max_try_num) if finish: try: md = get_meta_data(uid) - ##corect some metadata + ##correct some metadata if md_update is not None: md.update(md_update) # if 'username' in list(md.keys()): @@ -664,7 +675,7 @@ def realtime_xpcs_analysis( except: print("There are something wrong with this data: %s..." % uid) else: - print("\nThis is not a XPCS series. We will simiply ignore it.") + print("\nThis is not a XPCS series. 
We will simply ignore it.") print("*" * 50) # print( 'Sleep 10 sec here!!!') @@ -691,7 +702,7 @@ def compress_multi_uids( Parameters: uids: list, a list of uid mask: bool array, mask array - force_compress: default is False, just load the compresssed data; + force_compress: default is False, just load the compressed data; if True, will compress it to overwrite the old compressed data para_compress: apply the parallel compress algorithm bin_frame_number: @@ -748,11 +759,11 @@ def compress_multi_uids( #################################################################################################### -##get_two_time_mulit_uids, sequential cal for uids, but apply parallel for each uid ## +##get_two_time_multi_uids, sequential cal for uids, but apply parallel for each uid ## ################################################################################################# -def get_two_time_mulit_uids( +def get_two_time_multi_uids( uids, roi_mask, norm=None, @@ -765,20 +776,20 @@ def get_two_time_mulit_uids( compress_path=None, ): """Calculate two time correlation by using auto_two_Arrayc func for a set of uids, - if the two-time resutls are already created, by default (force_generate=False), just pass + if the two-time results are already created, by default (force_generate=False), just pass Parameters: uids: list, a list of uid roi_mask: bool array, roi mask array norm: the normalization array path: string, where to save the two time - force_generate: default, False, if the two-time resutls are already created, just pass + force_generate: default, False, if the two-time results are already created, just pass if True, will force to calculate two-time no matter exist or not Return: None, save the two-time in as path + uid + 'uid=%s_g12b'%uid e.g., - get_two_time_mulit_uids( guids, roi_mask, norm= norm,bin_frame_number=1, + get_two_time_multi_uids( guids, roi_mask, norm= norm,bin_frame_number=1, path= data_dir,force_generate=False ) """ @@ -812,13 +823,20 @@ def 
get_two_time_mulit_uids( if not force_generate: if os.path.exists(filename + ".npy"): doit = False - print("The two time correlation function for uid=%s is already calculated. Just pass..." % uid) + print( + "The two time correlation function for uid=%s is already calculated. Just pass..." + % uid + ) if doit: data_pixel = Get_Pixel_Arrayc(FD, pixelist, norm=norm).get_data() g12b = auto_two_Arrayc(data_pixel, roi_mask, index=None) np.save(filename, g12b) del g12b - print("The two time correlation function for uid={} is saved as {}.".format(uid, filename)) + print( + "The two time correlation function for uid={} is saved as {}.".format( + uid, filename + ) + ) def get_series_g2_from_g12( @@ -840,7 +858,7 @@ def get_series_g2_from_g12( will use g12b length to replace this number by default is None, will = [ g12b.shape[0] ] dose_label: the label of each dose, also is the keys of returned g2, lag - log_taus: if true, will only return a g2 with the correponding tau values + log_taus: if true, will only return a g2 with the corresponding tau values as calculated by multi-tau defined taus Return: @@ -862,18 +880,22 @@ def get_series_g2_from_g12( # print( good_end ) if good_end > L: warnings.warn( - "Warning: the dose value is too large, and please check the maxium dose in this data set and give a smaller dose value. We will use the maxium dose of the data." + "Warning: the dose value is too large, and please check the maximum dose in this data set and give a smaller dose value. We will use the maximum dose of the data." 
) good_end = L if not log_taus: - g2[key] = get_one_time_from_two_time(g12b[good_start:good_end, good_start:good_end, :]) + g2[key] = get_one_time_from_two_time( + g12b[good_start:good_end, good_start:good_end, :] + ) else: # print( good_end, num_bufs ) lag_step = get_multi_tau_lag_steps(good_end, num_bufs) lag_step = lag_step[lag_step < good_end - good_start] # print( len(lag_steps ) ) lag_steps[key] = lag_step * time_step - g2[key] = get_one_time_from_two_time(g12b[good_start:good_end, good_start:good_end, :])[lag_step] + g2[key] = get_one_time_from_two_time( + g12b[good_start:good_end, good_start:good_end, :] + )[lag_step] return lag_steps, g2 @@ -882,10 +904,10 @@ def get_fra_num_by_dose(exp_dose, exp_time, att=1, dead_time=2): """ Calculate the frame number to be correlated by giving a X-ray exposure dose - Paramters: + Parameters: exp_dose: a list, the exposed dose, e.g., in unit of exp_time(ms)*N(fram num)*att( attenuation) exp_time: float, the exposure time for a xpcs time sereies - dead_time: dead time for the fast shutter reponse time, CHX = 2ms + dead_time: dead time for the fast shutter response time, CHX = 2ms Return: noframes: the frame number to be correlated, exp_dose/( exp_time + dead_time ) e.g., @@ -898,7 +920,7 @@ def get_fra_num_by_dose(exp_dose, exp_time, att=1, dead_time=2): return np.int_(np.array(exp_dose) / (exp_time + dead_time) / att) -def get_series_one_time_mulit_uids( +def get_series_one_time_multi_uids( uids, qval_dict, trans=None, @@ -912,7 +934,7 @@ def get_series_one_time_mulit_uids( imgs=None, direct_load_data=False, ): - """Calculate a dose depedent series of one time correlations from two time + """Calculate a dose dependent series of one time correlations from two time Parameters: uids: list, a list of uid trans: list, same length as uids, the transmission list @@ -931,7 +953,9 @@ def get_series_one_time_mulit_uids( """ if path is None: - print("Please calculate two time function first by using get_two_time_mulit_uids 
function.") + print( + "Please calculate two time function first by using get_two_time_multi_uids function." + ) else: taus_uids = {} g2_uids = {} @@ -1117,20 +1141,22 @@ def plot_dose_g2( # return taus_dict, g2_dict -def run_xpcs_xsvs_single(uid, run_pargs, md_cor=None, return_res=False, reverse=True, clear_plot=False): +def run_xpcs_xsvs_single( + uid, run_pargs, md_cor=None, return_res=False, reverse=True, clear_plot=False +): """Y.G. Dec 22, 2016 Run XPCS XSVS analysis for a single uid Parameters: uid: unique id run_pargs: dict, control run type and setup parameters, such as q range et.al. - reverse:,True, revserse the image upside down + reverse:,True, reverse the image upside down Return: save analysis result to csv/png/h5 files return_res: if true, return a dict, containing g2,g4,g12,contrast et.al. depending on the run type An example for the run_pargs: run_pargs= dict( - scat_geometry = 'gi_saxs' #suport 'saxs', 'gi_saxs', 'ang_saxs' (for anisotropics saxs or flow-xpcs) + scat_geometry = 'gi_saxs' #support 'saxs', 'gi_saxs', 'ang_saxs' (for anisotropics saxs or flow-xpcs) force_compress = True,#False, para_compress = True, run_fit_form = False, @@ -1298,7 +1324,9 @@ def run_xpcs_xsvs_single(uid, run_pargs, md_cor=None, return_res=False, reverse= if md["detector"] == "eiger1m_single_image": Chip_Mask = np.load("/XF11ID/analysis/2017_1/masks/Eiger1M_Chip_Mask.npy") elif md["detector"] == "eiger4m_single_image" or md["detector"] == "image": - Chip_Mask = np.array(np.load("/XF11ID/analysis/2017_1/masks/Eiger4M_chip_mask.npy"), dtype=bool) + Chip_Mask = np.array( + np.load("/XF11ID/analysis/2017_1/masks/Eiger4M_chip_mask.npy"), dtype=bool + ) BadPix = np.load("/XF11ID/analysis/2018_1/BadPix_4M.npy") Chip_Mask.ravel()[BadPix] = 0 elif md["detector"] == "eiger500K_single_image": @@ -1355,7 +1383,9 @@ def run_xpcs_xsvs_single(uid, run_pargs, md_cor=None, return_res=False, reverse= ) # print_dict( setup_pargs ) - mask = load_mask(mask_path, mask_name, plot_=False, 
image_name=uidstr + "_mask", reverse=reverse) + mask = load_mask( + mask_path, mask_name, plot_=False, image_name=uidstr + "_mask", reverse=reverse + ) mask *= pixel_mask if md["detector"] == "eiger4m_single_image": mask[:, 2069] = 0 # False #Concluded from the previous results @@ -1398,7 +1428,9 @@ def run_xpcs_xsvs_single(uid, run_pargs, md_cor=None, return_res=False, reverse= photon_occ = len(np.where(avg_img)[0]) / (imgsa[0].size) # compress = photon_occ < .4 #if the photon ocupation < 0.5, do compress print("The non-zeros photon occupation is %s." % (photon_occ)) - print("Will " + "Always " + ["NOT", "DO"][compress] + " apply compress process.") + print( + "Will " + "Always " + ["NOT", "DO"][compress] + " apply compress process." + ) # good_start = 5 #make the good_start at least 0 t0 = time.time() filename = "/XF11ID/analysis/Compressed_Data" + "/uid_%s.cmp" % md["uid"] @@ -1423,7 +1455,15 @@ def run_xpcs_xsvs_single(uid, run_pargs, md_cor=None, return_res=False, reverse= uid_ = uidstr + "_fra_%s_%s" % (FD.beg, FD.end) print(uid_) plot1D( - y=imgsum[np.array([i for i in np.arange(good_start, len(imgsum)) if i not in bad_frame_list])], + y=imgsum[ + np.array( + [ + i + for i in np.arange(good_start, len(imgsum)) + if i not in bad_frame_list + ] + ) + ], title=uidstr + "_imgsum", xlabel="Frame", ylabel="Total_Intensity", @@ -1434,7 +1474,7 @@ def run_xpcs_xsvs_single(uid, run_pargs, md_cor=None, return_res=False, reverse= mask = mask * Chip_Mask # %system free && sync && echo 3 > /proc/sys/vm/drop_caches && free - ## Get bad frame list by a polynominal fit + ## Get bad frame list by a polynomial fit bad_frame_list = get_bad_frame_list( imgsum, fit=True, @@ -1464,7 +1504,9 @@ def run_xpcs_xsvs_single(uid, run_pargs, md_cor=None, return_res=False, reverse= cmap=cmap_albula, ) - imgsum_y = imgsum[np.array([i for i in np.arange(len(imgsum)) if i not in bad_frame_list])] + imgsum_y = imgsum[ + np.array([i for i in np.arange(len(imgsum)) if i not in bad_frame_list]) 
+ ] imgsum_x = np.arange(len(imgsum_y)) save_lists( [imgsum_x, imgsum_y], @@ -1557,10 +1599,14 @@ def run_xpcs_xsvs_single(uid, run_pargs, md_cor=None, return_res=False, reverse= if scat_geometry != "ang_saxs": Nimg = FD.end - FD.beg - time_edge = create_time_slice(N=Nimg, slice_num=3, slice_width=1, edges=None) + time_edge = create_time_slice( + N=Nimg, slice_num=3, slice_width=1, edges=None + ) time_edge = np.array(time_edge) + good_start # print( time_edge ) - qpt, iqst, qt = get_t_iqc(FD, time_edge, mask * Chip_Mask, pargs=setup_pargs, nx=1500) + qpt, iqst, qt = get_t_iqc( + FD, time_edge, mask * Chip_Mask, pargs=setup_pargs, nx=1500 + ) plot_t_iqc( qt, iqst, @@ -1615,7 +1661,9 @@ def run_xpcs_xsvs_single(uid, run_pargs, md_cor=None, return_res=False, reverse= path=data_dir, uid=uidstr, ) - qr_1d_pds = cal_1d_qr(avg_img, Qr, Qz, qr_map, qz_map, inc_x0, setup_pargs=setup_pargs) + qr_1d_pds = cal_1d_qr( + avg_img, Qr, Qz, qr_map, qz_map, inc_x0, setup_pargs=setup_pargs + ) plot_qr_1d_with_ROI( qr_1d_pds, qr_center=np.unique(np.array(list(qval_dict.values()))[:, 0]), @@ -1626,9 +1674,13 @@ def run_xpcs_xsvs_single(uid, run_pargs, md_cor=None, return_res=False, reverse= ) Nimg = FD.end - FD.beg - time_edge = create_time_slice(N=Nimg, slice_num=3, slice_width=1, edges=None) + time_edge = create_time_slice( + N=Nimg, slice_num=3, slice_width=1, edges=None + ) time_edge = np.array(time_edge) + good_start - qrt_pds = get_t_qrc(FD, time_edge, Qr, Qz, qr_map, qz_map, path=data_dir, uid=uidstr) + qrt_pds = get_t_qrc( + FD, time_edge, Qr, Qz, qr_map, qz_map, path=data_dir, uid=uidstr + ) plot_qrt_pds(qrt_pds, time_edge, qz_index=0, uid=uidstr, path=data_dir) ############################## @@ -1643,7 +1695,11 @@ def run_xpcs_xsvs_single(uid, run_pargs, md_cor=None, return_res=False, reverse= save=True, path=data_dir, ) - if scat_geometry == "saxs" or scat_geometry == "gi_saxs" or scat_geometry == "gi_waxs": + if ( + scat_geometry == "saxs" + or scat_geometry == "gi_saxs" + 
or scat_geometry == "gi_waxs" + ): if run_waterfall: wat = cal_waterfallc( FD, @@ -1670,7 +1726,9 @@ def run_xpcs_xsvs_single(uid, run_pargs, md_cor=None, return_res=False, reverse= times_roi, mean_int_sets = cal_each_ring_mean_intensityc( FD, roi_mask, timeperframe=None, multi_cor=True ) - plot_each_ring_mean_intensityc(times_roi, mean_int_sets, uid=uidstr, save=True, path=data_dir) + plot_each_ring_mean_intensityc( + times_roi, mean_int_sets, uid=uidstr, save=True, path=data_dir + ) roi_avg = np.average(mean_int_sets, axis=0) uid_ = uidstr + "_fra_%s_%s" % (FD.beg, FD.end) @@ -2105,7 +2163,7 @@ def run_xpcs_xsvs_single(uid, run_pargs, md_cor=None, return_res=False, reverse= ) if run_dose: - get_two_time_mulit_uids( + get_two_time_multi_uids( [uid], roi_mask, norm=norm, @@ -2124,7 +2182,7 @@ def run_xpcs_xsvs_single(uid, run_pargs, md_cor=None, return_res=False, reverse= dose_frame = np.int_([N / 8, N / 4, N / 2, 3 * N / 4, N * 0.99]) # N/32, N/16, N/8, N/4 ,N/2, 3*N/4, N*0.99 exposure_dose = tr * exposuretime * dose_frame - taus_uids, g2_uids = get_series_one_time_mulit_uids( + taus_uids, g2_uids = get_series_one_time_multi_uids( [uid], qval_dict, good_start=good_start, @@ -2152,7 +2210,7 @@ def run_xpcs_xsvs_single(uid, run_pargs, md_cor=None, return_res=False, reverse= append_name="", ) - # Speckel Visiblity + # Speckel Visibility if run_xsvs: max_cts = get_max_countc(FD, roi_mask) qind, pixelist = roi.extract_label_indices(roi_mask) @@ -2161,7 +2219,9 @@ def run_xpcs_xsvs_single(uid, run_pargs, md_cor=None, return_res=False, reverse= # time_steps = np.array( utils.geometric_series(2, len(imgs) ) ) time_steps = [0, 1] # only run the first two levels num_times = len(time_steps) - times_xsvs = exposuretime + (2 ** (np.arange(len(time_steps))) - 1) * timeperframe + times_xsvs = ( + exposuretime + (2 ** (np.arange(len(time_steps))) - 1) * timeperframe + ) print("The max counts are: %s" % max_cts) ### Do historam @@ -2454,12 +2514,18 @@ def run_xpcs_xsvs_single(uid, 
run_pargs, md_cor=None, return_res=False, reverse= Exdt["mean_int_sets"] = mean_int_sets if run_one_time: if scat_geometry != "ang_saxs": - for k, v in zip(["taus", "g2", "g2_fit_paras"], [taus, g2, g2_fit_paras]): + for k, v in zip( + ["taus", "g2", "g2_fit_paras"], [taus, g2, g2_fit_paras] + ): Exdt[k] = v else: - for k, v in zip(["taus_v", "g2_v", "g2_fit_paras_v"], [taus_v, g2_v, g2_fit_paras_v]): + for k, v in zip( + ["taus_v", "g2_v", "g2_fit_paras_v"], [taus_v, g2_v, g2_fit_paras_v] + ): Exdt[k] = v - for k, v in zip(["taus_p", "g2_p", "g2_fit_paras_p"], [taus_p, g2_p, g2_fit_paras_p]): + for k, v in zip( + ["taus_p", "g2_p", "g2_fit_paras_p"], [taus_p, g2_p, g2_fit_paras_p] + ): Exdt[k] = v if run_two_time: for k, v in zip( @@ -2483,7 +2549,9 @@ def run_xpcs_xsvs_single(uid, run_pargs, md_cor=None, return_res=False, reverse= ): Exdt[k] = v - export_xpcs_results_to_h5("uid=%s_Res.h5" % md["uid"], data_dir, export_dict=Exdt) + export_xpcs_results_to_h5( + "uid=%s_Res.h5" % md["uid"], data_dir, export_dict=Exdt + ) # extract_dict = extract_xpcs_results_from_h5( filename = 'uid=%s_Res.h5'%md['uid'], import_dir = data_dir ) # Creat PDF Report pdf_out_dir = os.path.join("/XF11ID/analysis/", CYCLE, username, "Results/") @@ -2518,7 +2586,9 @@ def run_xpcs_xsvs_single(uid, run_pargs, md_cor=None, return_res=False, reverse= pname = pdf_out_dir + pdf_filename atch = [Attachment(open(pname, "rb"))] try: - update_olog_uid(uid=md["uid"], text="Add XPCS Analysis PDF Report", attachments=atch) + update_olog_uid( + uid=md["uid"], text="Add XPCS Analysis PDF Report", attachments=atch + ) except: print( "I can't attach this PDF: %s due to a duplicated filename. Please give a different PDF file." diff --git a/pyCHX/v2/_futurepyCHX/movie_maker.py b/pyCHX/v2/_futurepyCHX/movie_maker.py index bade9de..87240e4 100644 --- a/pyCHX/v2/_futurepyCHX/movie_maker.py +++ b/pyCHX/v2/_futurepyCHX/movie_maker.py @@ -23,7 +23,7 @@ def select_regoin( defined by verts e.g. 
xs,xe,ys,ye = vert #x_start, x_end, y_start,y_end (dimy, dimx,) = img.shape - Giving cut postion, start, end, width""" + Giving cut position, start, end, width""" import numpy as np xs, xe, ys, ye = vert @@ -67,7 +67,6 @@ def save_png_series( dpi=100, ): import matplotlib.pyplot as plt - import numpy as np from matplotlib.colors import LogNorm """ @@ -154,7 +153,6 @@ def movie_maker( ): import matplotlib.animation as animation import matplotlib.pyplot as plt - import numpy as np from matplotlib.colors import LogNorm """ @@ -219,7 +217,9 @@ def movie_maker( # print( cmap, vmin, vmax ) if not logs: - im = ax.imshow(i0, origin="lower", cmap=cmap, interpolation="nearest", vmin=vmin, vmax=vmax) + im = ax.imshow( + i0, origin="lower", cmap=cmap, interpolation="nearest", vmin=vmin, vmax=vmax + ) else: im = ax.imshow( i0, @@ -230,7 +230,9 @@ def movie_maker( ) # ttl = ax.text(.75, .2, '', transform = ax.transAxes, va='center', color='white', fontsize=18) - ttl = ax.text(0.75, 0.2, "", transform=ax.transAxes, va="center", color="black", fontsize=18) + ttl = ax.text( + 0.75, 0.2, "", transform=ax.transAxes, va="center", color="black", fontsize=18 + ) # print asp # fig.set_size_inches( [5., 5 * asp] ) diff --git a/pyCHX/v2/_futurepyCHX/xpcs_timepixel.py b/pyCHX/v2/_futurepyCHX/xpcs_timepixel.py index 264da7e..482aa5c 100644 --- a/pyCHX/v2/_futurepyCHX/xpcs_timepixel.py +++ b/pyCHX/v2/_futurepyCHX/xpcs_timepixel.py @@ -10,52 +10,24 @@ import numpy as np import pandas as pds from numpy import ( - apply_over_axes, arange, - arctan, - around, - array, digitize, dot, - exp, histogram, - histogramdd, hstack, hypot, indices, int_, intersect1d, linspace, - load, - log, - log10, - ma, - mean, - mgrid, - ones, - pi, - poly1d, - polyfit, - power, - ravel, - reshape, round, save, - shape, - sin, - sqrt, - std, - sum, - unique, - vstack, where, zeros, zeros_like, ) -from numpy.linalg import lstsq from tqdm import tqdm -from pyCHX.chx_compress import Multifile, go_through_FD, pass_FD from 
pyCHX.chx_libs import multi_tau_lags @@ -238,7 +210,9 @@ def compress_timepix_data( with_pickle=with_pickle, ) else: - print("Using already created compressed file with filename as :%s." % filename) + print( + "Using already created compressed file with filename as :%s." % filename + ) return pkl.load(open(filename + ".pkl", "rb")) # FD = Multifile(filename, 0, int(1e25) ) @@ -277,7 +251,9 @@ def create_timepix_compress_header(md, filename, nobytes=2, bins=1): fp.close() -def init_compress_timepix_data(pos, t, binstep, filename, mask=None, md=None, nobytes=2, with_pickle=True): +def init_compress_timepix_data( + pos, t, binstep, filename, mask=None, md=None, nobytes=2, with_pickle=True +): """YG.Dev@CHX Nov 19, 2017 with optimal algorithm by using complex index techniques Compress the timepixeldata, in a format of x, y, t @@ -617,7 +593,9 @@ def apply_timepix_mask(x, y, t, roi): return x[w], y[w], t[w] -def get_timepixel_data_from_series(data_dir, filename_prefix, total_filenum=72, colms=int(1e5)): +def get_timepixel_data_from_series( + data_dir, filename_prefix, total_filenum=72, colms=int(1e5) +): x = np.zeros(total_filenum * colms) y = np.zeros(total_filenum * colms) t = zeros(total_filenum * colms) @@ -753,14 +731,14 @@ def read_xyt_frame(n=1): def readframe_series(n=1): - """Using this universe name for all the loading fucntions""" + """Using this universe name for all the loading functions""" return read_xyt_frame(n) class xpcs(object): def __init__(self): """DOCUMENT __init__( ) - the initilization of the XPCS class + the initialization of the XPCS class """ self.version = "version_0" self.create_time = "July_14_2015" @@ -822,7 +800,7 @@ def make_qlist(self): def calqlist(self, qmask=None, shape="circle"): """DOCUMENT calqlist( qmask=,shape=, ) - calculate the equvilent pixel with a shape, + calculate the equivalent pixel with a shape, return qind: the index of q pixellist: the list of pixle diff --git a/pyCHX/xpcs_timepixel.py b/pyCHX/xpcs_timepixel.py 
index 85080c5..5c657fe 100644 --- a/pyCHX/xpcs_timepixel.py +++ b/pyCHX/xpcs_timepixel.py @@ -10,52 +10,24 @@ import numpy as np import pandas as pds from numpy import ( - apply_over_axes, arange, - arctan, - around, - array, digitize, dot, - exp, histogram, - histogramdd, hstack, hypot, indices, int_, intersect1d, linspace, - load, - log, - log10, - ma, - mean, - mgrid, - ones, - pi, - poly1d, - polyfit, - power, - ravel, - reshape, round, save, - shape, - sin, - sqrt, - std, - sum, - unique, - vstack, where, zeros, zeros_like, ) -from numpy.linalg import lstsq from tqdm import tqdm -from pyCHX.chx_compress import Multifile, go_through_FD, pass_FD from pyCHX.chx_libs import multi_tau_lags @@ -73,14 +45,30 @@ def get_timepixel_data(data_dir, filename, time_unit=1): # return np.array( data['Col'] ), np.array(data['Row']), np.array(data['GlobalTimeFine']) #*6.1 #in ps if time_unit != 1: try: - x, y, t = np.array(data["#Col"]), np.array(data["#Row"]), np.array(data["#ToA"]) * time_unit + x, y, t = ( + np.array(data["#Col"]), + np.array(data["#Row"]), + np.array(data["#ToA"]) * time_unit, + ) except: - x, y, t = np.array(data["#Col"]), np.array(data[" #Row"]), np.array(data[" #ToA"]) * time_unit + x, y, t = ( + np.array(data["#Col"]), + np.array(data[" #Row"]), + np.array(data[" #ToA"]) * time_unit, + ) else: try: - x, y, t = np.array(data["#Col"]), np.array(data["#Row"]), np.array(data["#ToA"]) + x, y, t = ( + np.array(data["#Col"]), + np.array(data["#Row"]), + np.array(data["#ToA"]), + ) except: - x, y, t = np.array(data["#Col"]), np.array(data[" #Row"]), np.array(data[" #ToA"]) + x, y, t = ( + np.array(data["#Col"]), + np.array(data[" #Row"]), + np.array(data[" #ToA"]), + ) return x, y, t - t.min() # * 25/4096. 
#in ns @@ -167,7 +155,14 @@ def get_FD_end_num(FD, maxend=1e10): def compress_timepix_data( - pos, t, tbins, filename=None, md=None, force_compress=False, nobytes=2, with_pickle=True + pos, + t, + tbins, + filename=None, + md=None, + force_compress=False, + nobytes=2, + with_pickle=True, ): """YG.Dev@CHX Nov 20, 2017 Compress the timepixeldata, in a format of x, y, t @@ -194,16 +189,30 @@ def compress_timepix_data( if force_compress: print("Create a new compress file with filename as :%s." % filename) return init_compress_timepix_data( - pos, t, tbins, filename=filename, md=md, nobytes=nobytes, with_pickle=with_pickle + pos, + t, + tbins, + filename=filename, + md=md, + nobytes=nobytes, + with_pickle=with_pickle, ) else: if not os.path.exists(filename): print("Create a new compress file with filename as :%s." % filename) return init_compress_timepix_data( - pos, t, tbins, filename=filename, md=md, nobytes=nobytes, with_pickle=with_pickle + pos, + t, + tbins, + filename=filename, + md=md, + nobytes=nobytes, + with_pickle=with_pickle, ) else: - print("Using already created compressed file with filename as :%s." % filename) + print( + "Using already created compressed file with filename as :%s." 
% filename + ) return pkl.load(open(filename + ".pkl", "rb")) # FD = Multifile(filename, 0, int(1e25) ) @@ -242,7 +251,9 @@ def create_timepix_compress_header(md, filename, nobytes=2, bins=1): fp.close() -def init_compress_timepix_data(pos, t, binstep, filename, mask=None, md=None, nobytes=2, with_pickle=True): +def init_compress_timepix_data( + pos, t, binstep, filename, mask=None, md=None, nobytes=2, with_pickle=True +): """YG.Dev@CHX Nov 19, 2017 with optimal algorithm by using complex index techniques Compress the timepixeldata, in a format of x, y, t @@ -502,7 +513,17 @@ class Get_TimePixel_Arrayc(object): """ def __init__( - self, pos, hitime, tbins, pixelist, beg=None, end=None, norm=None, flat_correction=None, detx=256, dety=256 + self, + pos, + hitime, + tbins, + pixelist, + beg=None, + end=None, + norm=None, + flat_correction=None, + detx=256, + dety=256, ): """ indexable: a images sequences @@ -576,7 +597,9 @@ def apply_timepix_mask(x, y, t, roi): return x[w], y[w], t[w] -def get_timepixel_data_from_series(data_dir, filename_prefix, total_filenum=72, colms=int(1e5)): +def get_timepixel_data_from_series( + data_dir, filename_prefix, total_filenum=72, colms=int(1e5) +): x = np.zeros(total_filenum * colms) y = np.zeros(total_filenum * colms) t = zeros(total_filenum * colms) @@ -584,7 +607,11 @@ def get_timepixel_data_from_series(data_dir, filename_prefix, total_filenum=72, filename = filename_prefix + "_%s.csv" % n data = get_timepixel_data(data_dir, filename) if n != total_filenum - 1: - (x[n * colms : (n + 1) * colms], y[n * colms : (n + 1) * colms], t[n * colms : (n + 1) * colms]) = ( + ( + x[n * colms : (n + 1) * colms], + y[n * colms : (n + 1) * colms], + t[n * colms : (n + 1) * colms], + ) = ( data[0], data[1], data[2], @@ -593,7 +620,11 @@ def get_timepixel_data_from_series(data_dir, filename_prefix, total_filenum=72, # print( filename_prefix + '_%s.csv'%n ) ln = len(data[0]) # print( ln ) - (x[n * colms : n * colms + ln], y[n * colms : n * colms + 
ln], t[n * colms : n * colms + ln]) = ( + ( + x[n * colms : n * colms + ln], + y[n * colms : n * colms + ln], + t[n * colms : n * colms + ln], + ) = ( data[0], data[1], data[2], @@ -712,14 +743,14 @@ def read_xyt_frame(n=1): def readframe_series(n=1): - """Using this universe name for all the loading fucntions""" + """Using this universe name for all the loading functions""" return read_xyt_frame(n) class xpcs(object): def __init__(self): """DOCUMENT __init__( ) - the initilization of the XPCS class + the initialization of the XPCS class """ self.version = "version_0" self.create_time = "July_14_2015" @@ -781,7 +812,7 @@ def make_qlist(self): def calqlist(self, qmask=None, shape="circle"): """DOCUMENT calqlist( qmask=,shape=, ) - calculate the equvilent pixel with a shape, + calculate the equivalent pixel with a shape, return qind: the index of q pixellist: the list of pixle diff --git a/requirements-dev.txt b/requirements-dev.txt index 3b388e1..71e9d0b 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -4,16 +4,16 @@ black codecov coverage flake8 +# These are dependencies of various sphinx extensions for documentation. +ipython isort +matplotlib nbstripout +numpydoc pre-commit pre-commit-hooks pytest sphinx -twine -# These are dependencies of various sphinx extensions for documentation. 
-ipython -matplotlib -numpydoc sphinx-copybutton sphinx_rtd_theme +twine diff --git a/requirements.txt b/requirements.txt index 2f0bb3b..149ae3c 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,7 +1,11 @@ +git+https://github.com/NSLS-II-CHX/chxtools.git#egg=chxtools cython dask databroker dill +git+https://github.com/ChrisBeaumont/mpl-modest-image +git+https://github.com/Nikea/xray-vision.git#Xray-vision +git+https://github.com/NSLS-II-CHX/eiger-io.git#eiger-io historydict ipython lmfit @@ -11,16 +15,12 @@ pandas pillow pyyaml reportlab +git+https://github.com/scikit-beam/scikit-beam.git#egg=scikit-beam scikit-image scipy theano tifffile tqdm -git+https://github.com/scikit-beam/scikit-beam.git#egg=scikit-beam -git+https://github.com/NSLS-II-CHX/eiger-io.git#eiger-io -git+https://github.com/NSLS-II-CHX/chxtools.git#egg=chxtools -git+https://github.com/Nikea/xray-vision.git#Xray-vision -git+https://github.com/ChrisBeaumont/mpl-modest-image #pytables #git+https://github.com/soft-matter/slicerator.git #git+https://github.com/tqdm/tqdm.git#tqdm diff --git a/run_tests.py b/run_tests.py index 15a620f..0f38664 100644 --- a/run_tests.py +++ b/run_tests.py @@ -1,5 +1,4 @@ #!/usr/bin/env python -import os import sys import pytest diff --git a/setup.py b/setup.py index 177a058..45fb36d 100644 --- a/setup.py +++ b/setup.py @@ -20,9 +20,7 @@ Upgrade pip like so: pip install --upgrade pip -""".format( - *(sys.version_info[:2] + min_version) - ) +""".format(*(sys.version_info[:2] + min_version)) sys.exit(error) here = path.abspath(path.dirname(__file__)) diff --git a/test-requirements.txt b/test-requirements.txt index 98b96f2..a4c1378 100644 --- a/test-requirements.txt +++ b/test-requirements.txt @@ -1,3 +1,3 @@ -coverage codecov +coverage pytest diff --git a/versioneer.py b/versioneer.py index 1cb0d80..8403e08 100644 --- a/versioneer.py +++ b/versioneer.py @@ -385,7 +385,10 @@ def get_root(): # versioneer.py was first imported, even in later projects. 
me = os.path.realpath(os.path.abspath(__file__)) if os.path.splitext(me)[0] != os.path.splitext(versioneer_py)[0]: - print("Warning: build in %s is using versioneer.py from %s" % (os.path.dirname(me), versioneer_py)) + print( + "Warning: build in %s is using versioneer.py from %s" + % (os.path.dirname(me), versioneer_py) + ) except NameError: pass return root @@ -444,7 +447,10 @@ def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False): dispcmd = str([c] + args) # remember shell=False, so use git.cmd on windows, not just git p = subprocess.Popen( - [c] + args, cwd=cwd, stdout=subprocess.PIPE, stderr=(subprocess.PIPE if hide_stderr else None) + [c] + args, + cwd=cwd, + stdout=subprocess.PIPE, + stderr=(subprocess.PIPE if hide_stderr else None), ) break except EnvironmentError: @@ -469,9 +475,7 @@ def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False): return stdout -LONG_VERSION_PY[ - "git" -] = """ +LONG_VERSION_PY["git"] = """ # This file helps to compute a version number in source trees obtained from # git-archive tarball (such as those provided by githubs download-from-tag # feature). Distribution tarballs (built by setup.py sdist) and build @@ -699,7 +703,7 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): # TAG-NUM-gHEX mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe) if not mo: - # unparseable. Maybe git-describe is misbehaving? + # unparsable. Maybe git-describe is misbehaving? 
pieces["error"] = ("unable to parse git-describe output: '%%s'" %% describe_out) return pieces @@ -991,7 +995,12 @@ def git_versions_from_keywords(keywords, tag_prefix, verbose): r = ref[len(tag_prefix) :] if verbose: print("picking %s" % r) - return {"version": r, "full-revisionid": keywords["full"].strip(), "dirty": False, "error": None} + return { + "version": r, + "full-revisionid": keywords["full"].strip(), + "dirty": False, + "error": None, + } # no suitable tags, so version is "0+unknown", but full hex is still there if verbose: print("no suitable tags, using unknown + full revision id") @@ -1020,7 +1029,9 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): GITS = ["git.cmd", "git.exe"] # if there is a tag, this yields TAG-NUM-gHEX[-dirty] # if there are no tags, this yields HEX[-dirty] (no NUM) - describe_out = run_command(GITS, ["describe", "--tags", "--dirty", "--always", "--long"], cwd=root) + describe_out = run_command( + GITS, ["describe", "--tags", "--dirty", "--always", "--long"], cwd=root + ) # --long was added in git-1.5.5 if describe_out is None: raise NotThisMethod("'git describe' failed") @@ -1051,7 +1062,7 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): # TAG-NUM-gHEX mo = re.search(r"^(.+)-(\d+)-g([0-9a-f]+)$", git_describe) if not mo: - # unparseable. Maybe git-describe is misbehaving? + # unparsable. Maybe git-describe is misbehaving? 
pieces["error"] = "unable to parse git-describe output: '%s'" % describe_out return pieces @@ -1061,7 +1072,10 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): if verbose: fmt = "tag '%s' doesn't start with prefix '%s'" print(fmt % (full_tag, tag_prefix)) - pieces["error"] = "tag '%s' doesn't start with prefix '%s'" % (full_tag, tag_prefix) + pieces["error"] = "tag '%s' doesn't start with prefix '%s'" % ( + full_tag, + tag_prefix, + ) return pieces pieces["closest-tag"] = full_tag[len(tag_prefix) :] @@ -1124,7 +1138,12 @@ def versions_from_parentdir(parentdir_prefix, root, verbose): "prefix '%s'" % (root, dirname, parentdir_prefix) ) raise NotThisMethod("rootdir doesn't start with parentdir_prefix") - return {"version": dirname[len(parentdir_prefix) :], "full-revisionid": None, "dirty": False, "error": None} + return { + "version": dirname[len(parentdir_prefix) :], + "full-revisionid": None, + "dirty": False, + "error": None, + } SHORT_VERSION_PY = """ @@ -1152,7 +1171,9 @@ def versions_from_file(filename): contents = f.read() except EnvironmentError: raise NotThisMethod("unable to read _version.py") - mo = re.search(r"version_json = '''\n(.*)''' # END VERSION_JSON", contents, re.M | re.S) + mo = re.search( + r"version_json = '''\n(.*)''' # END VERSION_JSON", contents, re.M | re.S + ) if not mo: raise NotThisMethod("no version_json in _version.py") return json.loads(mo.group(1)) @@ -1322,7 +1343,12 @@ def render(pieces, style): else: raise ValueError("unknown style '%s'" % style) - return {"version": rendered, "full-revisionid": pieces["long"], "dirty": pieces["dirty"], "error": None} + return { + "version": rendered, + "full-revisionid": pieces["long"], + "dirty": pieces["dirty"], + "error": None, + } class VersioneerBadRootError(Exception): @@ -1343,7 +1369,9 @@ def get_versions(verbose=False): handlers = HANDLERS.get(cfg.VCS) assert handlers, "unrecognized VCS '%s'" % cfg.VCS verbose = verbose or cfg.verbose - assert 
cfg.versionfile_source is not None, "please set versioneer.versionfile_source" + assert cfg.versionfile_source is not None, ( + "please set versioneer.versionfile_source" + ) assert cfg.tag_prefix is not None, "please set versioneer.tag_prefix" versionfile_abs = os.path.join(root, cfg.versionfile_source) @@ -1397,7 +1425,12 @@ def get_versions(verbose=False): if verbose: print("unable to compute version") - return {"version": "0+unknown", "full-revisionid": None, "dirty": None, "error": "unable to compute version"} + return { + "version": "0+unknown", + "full-revisionid": None, + "dirty": None, + "error": "unable to compute version", + } def get_version(): @@ -1527,7 +1560,9 @@ def make_release_tree(self, base_dir, files): # updated value target_versionfile = os.path.join(base_dir, cfg.versionfile_source) print("UPDATING %s" % target_versionfile) - write_to_version_file(target_versionfile, self._versioneer_generated_versions) + write_to_version_file( + target_versionfile, self._versioneer_generated_versions + ) cmds["sdist"] = cmd_sdist @@ -1582,7 +1617,11 @@ def do_setup(): root = get_root() try: cfg = get_config_from_root(root) - except (EnvironmentError, configparser.NoSectionError, configparser.NoOptionError) as e: + except ( + EnvironmentError, + configparser.NoSectionError, + configparser.NoOptionError, + ) as e: if isinstance(e, (EnvironmentError, configparser.NoSectionError)): print("Adding sample versioneer config to setup.cfg", file=sys.stderr) with open(os.path.join(root, "setup.cfg"), "a") as f: @@ -1646,7 +1685,10 @@ def do_setup(): else: print(" 'versioneer.py' already in MANIFEST.in") if cfg.versionfile_source not in simple_includes: - print(" appending versionfile_source ('%s') to MANIFEST.in" % cfg.versionfile_source) + print( + " appending versionfile_source ('%s') to MANIFEST.in" + % cfg.versionfile_source + ) with open(manifest_in, "a") as f: f.write("include %s\n" % cfg.versionfile_source) else: From 059793b2fc7f8e37c6981092e8da4c64d1879d56 Mon 
Sep 17 00:00:00 2001 From: jennmald Date: Wed, 15 Apr 2026 13:09:08 -0400 Subject: [PATCH 4/7] XPCS_XSVS_SAXS --- pyCHX/XPCS_XSVS_SAXS_Multi_2017_V4.py | 364 +++++++++++++++----------- 1 file changed, 204 insertions(+), 160 deletions(-) diff --git a/pyCHX/XPCS_XSVS_SAXS_Multi_2017_V4.py b/pyCHX/XPCS_XSVS_SAXS_Multi_2017_V4.py index ecdafc0..1f5de04 100644 --- a/pyCHX/XPCS_XSVS_SAXS_Multi_2017_V4.py +++ b/pyCHX/XPCS_XSVS_SAXS_Multi_2017_V4.py @@ -1,19 +1,57 @@ # python XPCS_XSVS_SAXS_Multi_2017_V4.py - -from pyCHX.chx_packages import * +import os +import getpass +import numpy as np +from pyCHX.chx_generic_functions import ( + load_mask, + show_img, + save_arrays, + save_dict_csv, + psave_obj, + find_uids, + get_averaged_data_from_multi_res, + save_g2_general, + plot_g2_general, + plot_q_rate_fit_general, + get_q_rate_fit_general, + plot1D, + save_g2_fit_para_tocsv, + get_g2_fit_general, +) +from pyCHX.Two_Time_Correlation_Function import show_C12 +from pyCHX.chx_olog import update_olog_uid, Attachment +from pyCHX.Create_Report import ( + make_pdf_report, + export_xpcs_results_to_h5, + extract_xpcs_results_from_h5, +) +from pyCHX.chx_speckle import plot_g2_contrast +from pyCHX.chx_compress_analysis import plot_each_ring_mean_intensityc, plot_waterfallc +from pyCHX.SAXS import ( + show_saxs_qmap, + plot_circular_average, + plot_qIq_with_ROI, + plot_t_iqc, + show_ROI_on_image, +) +from pyCHX.XPCS_GiSAXS import show_qzr_roi, plot_qr_1d_with_ROI def XPCS_XSVS_SAXS_Multi( start_time, stop_time, run_pargs, + pdf_version, + timeperframe, suf_ids=None, + qval_dict=None, uid_average="Au50_7p5PEGX1_vs_slow_120116", ): + setup_pargs = dict(run_pargs) scat_geometry = run_pargs["scat_geometry"] - force_compress = run_pargs["force_compress"] - para_compress = run_pargs["para_compress"] + # force_compress = run_pargs["force_compress"] + # para_compress = run_pargs["para_compress"] run_fit_form = run_pargs["run_fit_form"] run_waterfall = run_pargs["run_waterfall"] run_t_ROI_Inten = 
run_pargs["run_t_ROI_Inten"] @@ -28,12 +66,12 @@ def XPCS_XSVS_SAXS_Multi( run_xsvs = False ############################################################### att_pdf_report = run_pargs["att_pdf_report"] - show_plot = run_pargs["show_plot"] - CYCLE = run_pargs["CYCLE"] + # show_plot = run_pargs["show_plot"] + # CYCLE = run_pargs["CYCLE"] mask_path = run_pargs["mask_path"] mask_name = run_pargs["mask_name"] good_start = run_pargs["good_start"] - use_imgsum_norm = run_pargs["use_imgsum_norm"] + # use_imgsum_norm = run_pargs["use_imgsum_norm"] mask = load_mask( mask_path, @@ -45,8 +83,8 @@ def XPCS_XSVS_SAXS_Multi( # mask *= pixel_mask mask[:, 2069] = 0 # False #Concluded from the previous results # np.save( data_dir + 'mask', mask) - show_img(mask, image_name="%s_mask" % uid_average, save=True, path=data_dir) - mask_load = mask.copy() + # show_img(mask, image_name="%s_mask" % uid_average, save=True, path=data_dir) + # mask_load = mask.copy() username = getpass.getuser() data_dir0 = os.path.join( @@ -59,18 +97,18 @@ def XPCS_XSVS_SAXS_Multi( uid_average = "uid=" + uid_average if suf_ids is None: - sids, uids, fuids = find_uids(start_time, stop_time) + _, uids, fuids = find_uids(start_time, stop_time) else: - sids, uids, fuids = suf_ids + _, uids, fuids = suf_ids print(uids) uid = uids[0] - data_dir_ = data_dir - uid_ = uid_average + # data_dir_ = data_dir + # uid_ = uid_average ### For Load results multi_res = {} - for uid, fuid in zip(guids, fuids): + for uid, fuid in zip(uids, fuids): multi_res[uid] = extract_xpcs_results_from_h5( filename="uid=%s_Res.h5" % fuid, import_dir=data_dir0 + uid + "/" ) @@ -88,7 +126,7 @@ def XPCS_XSVS_SAXS_Multi( iqst = get_averaged_data_from_multi_res(multi_res, keystr="iqst") elif scat_geometry == "gi_saxs": qr_1d_pds = get_averaged_data_from_multi_res(multi_res, keystr="qr_1d_pds") - qr_1d_pds = trans_data_to_pd(qr_1d_pds, label=qr_1d_pds_label) + # qr_1d_pds = trans_data_to_pd(qr_1d_pds, label=qr_1d_pds_label) if run_waterfall: wat = 
get_averaged_data_from_multi_res(multi_res, keystr="wat") if run_t_ROI_Inten: @@ -100,7 +138,7 @@ def XPCS_XSVS_SAXS_Multi( if run_one_time: g2 = get_averaged_data_from_multi_res(multi_res, keystr="g2") taus = get_averaged_data_from_multi_res(multi_res, keystr="taus") - g2_pds = save_g2_general( + _ = save_g2_general( g2, taus=taus, qr=np.array(list(qval_dict.values()))[:, 0], @@ -138,7 +176,7 @@ def XPCS_XSVS_SAXS_Multi( g2b = get_averaged_data_from_multi_res(multi_res, keystr="g2b") tausb = get_averaged_data_from_multi_res(multi_res, keystr="tausb") - g2b_pds = save_g2_general( + _ = save_g2_general( g2b, taus=tausb, qr=np.array(list(qval_dict.values()))[:, 0], @@ -174,7 +212,7 @@ def XPCS_XSVS_SAXS_Multi( if run_four_time: g4 = get_averaged_data_from_multi_res(multi_res, keystr="g4") taus4 = get_averaged_data_from_multi_res(multi_res, keystr="taus4") - g4_pds = save_g2_general( + _ = save_g2_general( g4, taus=taus4, qr=np.array(list(qval_dict.values()))[:, 0], @@ -191,56 +229,63 @@ def XPCS_XSVS_SAXS_Multi( times_xsvs = get_averaged_data_from_multi_res( multi_res, keystr="times_xsvs", different_length=False ) - cont_pds = save_arrays( + _ = save_arrays( contrast_factorL, label=times_xsvs, filename="%s_contrast_factorL.csv" % uid, path=data_dir, return_res=True, ) - if False: - spec_kmean = get_averaged_data_from_multi_res( - multi_res, keystr="spec_kmean" - ) - spec_pds = get_averaged_data_from_multi_res( - multi_res, keystr="spec_pds", different_length=False - ) - times_xsvs = get_averaged_data_from_multi_res( - multi_res, keystr="times_xsvs", different_length=False - ) - spec_his, spec_std = get_his_std_from_pds(spec_pds, his_shapes=None) - ML_val, KL_val, K_ = get_xsvs_fit( - spec_his, - spec_kmean, - spec_std, - max_bins=2, - varyK=False, - ) - contrast_factorL = get_contrast(ML_val) - spec_km_pds = save_KM( - spec_kmean, - KL_val, - ML_val, - qs=q_ring_center, - level_time=times_xsvs, - uid=uid_average, - path=data_dir_average, - ) - plot_xsvs_fit( - 
spec_his, - ML_val, - KL_val, - K_mean=spec_kmean, - spec_std=spec_std, - xlim=[0, 15], - vlim=[0.9, 1.1], - uid=uid_average, - qth=None, - logy=True, - times=times_xsvs, - q_ring_center=q_ring_center, - path=data_dir, - ) + # if False: + # spec_kmean = get_averaged_data_from_multi_res( + # multi_res, keystr="spec_kmean" + # ) + # spec_pds = get_averaged_data_from_multi_res( + # multi_res, keystr="spec_pds", different_length=False + # ) + # times_xsvs = get_averaged_data_from_multi_res( + # multi_res, keystr="times_xsvs", different_length=False + # ) + # spec_his, spec_std = get_his_std_from_pds(spec_pds, his_shapes=None) + # ML_val, KL_val, K_ = get_xsvs_fit( + # spec_his, + # spec_kmean, + # spec_std, + # max_bins=2, + # varyK=False, + # ) + # contrast_factorL = get_contrast(ML_val) + # spec_km_pds = save_KM( + # spec_kmean, + # KL_val, + # ML_val, + # qs=q_ring_center, + # level_time=times_xsvs, + # uid=uid_average, + # path=data_dir_average, + # ) + # plot_xsvs_fit( + # spec_his, + # ML_val, + # KL_val, + # K_mean=spec_kmean, + # spec_std=spec_std, + # xlim=[0, 15], + # vlim=[0.9, 1.1], + # uid=uid_average, + # qth=None, + # logy=True, + # times=times_xsvs, + # q_ring_center=q_ring_center, + # path=data_dir, + # ) + spec_pds = None + spec_km_pds = None + spec_kmean = None + roi_mask = None + pixel_mask = None + qr = None + qth_interest = None if scat_geometry == "saxs": show_saxs_qmap( @@ -294,7 +339,7 @@ def XPCS_XSVS_SAXS_Multi( show_ROI_on_image( avg_img, roi_mask, - center, + center=None, label_on=False, rwidth=700, alpha=0.9, @@ -311,7 +356,7 @@ def XPCS_XSVS_SAXS_Multi( vmin=0.1, vmax=np.max(avg_img * 0.1), logs=True, - image_name=uidstr + "_img_avg", + image_name=uid + "_img_avg", save=True, path=data_dir, ) @@ -320,18 +365,18 @@ def XPCS_XSVS_SAXS_Multi( qr_center=np.unique(np.array(list(qval_dict.values()))[:, 0]), loglog=False, save=True, - uid=uidstr, + uid=uid, path=data_dir, ) show_qzr_roi( avg_img, roi_mask, - inc_x0, - ticks, + inc_x0=None, + 
ticks=None, alpha=0.5, save=True, path=data_dir, - uid=uidstr, + uid=uid, ) if run_waterfall: @@ -382,7 +427,6 @@ def XPCS_XSVS_SAXS_Multi( g12b, q_ind=qth_interest, N1=0, - N2=min(len(imgsa), 1000), vmin=1.01, vmax=1.25, timeperframe=timeperframe, @@ -537,7 +581,7 @@ def XPCS_XSVS_SAXS_Multi( ): Exdt[k] = v - contr_pds = save_arrays( + _ = save_arrays( Exdt["contrast_factorL"], label=Exdt["times_xsvs"], filename="%s_contr.csv" % uid, @@ -582,7 +626,7 @@ def XPCS_XSVS_SAXS_Multi( text="Add XPCS Averaged Analysis PDF Report", attachments=atch, ) - except: + except Exception: print( "I can't attach this PDF: %s due to a duplicated filename. Please give a different PDF file." % pname @@ -593,92 +637,92 @@ def XPCS_XSVS_SAXS_Multi( # The End! -if False: - start_time, stop_time = ( - "2016-12-1 16:30:00", - "2016-12-1 16:31:50", - ) # for 10 nm, 20, for test purpose - suf_ids = find_uids(start_time, stop_time) - sp = "test" - uid_averages = [ - sp + "_vs_test1_120116", - sp + "_vs_test2_120116", - sp + "_vs_test3_120116", - ] - - run_pargs = dict( - scat_geometry="saxs", - # scat_geometry = 'gi_saxs', - force_compress=False, # True, #False, #True,#False, - para_compress=True, - run_fit_form=False, - run_waterfall=True, # False, - run_t_ROI_Inten=True, - # run_fit_g2 = True, - fit_g2_func="stretched", - run_one_time=True, # False, - run_two_time=True, # False, - run_four_time=False, # True, #False, - run_xsvs=True, - att_pdf_report=True, - show_plot=False, - CYCLE="2016_3", - # if scat_geometry == 'gi_saxs': - # mask_path = '/XF11ID/analysis/2016_3/masks/', - # mask_name = 'Nov16_4M-GiSAXS_mask.npy', - # elif scat_geometry == 'saxs': - mask_path="/XF11ID/analysis/2016_3/masks/", - mask_name="Nov28_4M_SAXS_mask.npy", - good_start=5, - #####################################for saxs - uniformq=True, - inner_radius=0.005, # 0.005 for 50 nmAu/SiO2, 0.006, #for 10nm/coralpor - outer_radius=0.04, # 0.04 for 50 nmAu/SiO2, 0.05, #for 10nm/coralpor - num_rings=12, - 
gap_ring_number=6, - number_rings=1, - ############################for gi_saxs - # inc_x0 = 1473, - # inc_y0 = 372, - # refl_x0 = 1473, - # refl_y0 = 730, - qz_start=0.025, - qz_end=0.04, - qz_num=3, - gap_qz_num=1, - # qz_width = ( qz_end - qz_start)/(qz_num +1), - qr_start=0.0025, - qr_end=0.07, - qr_num=14, - gap_qr_num=5, - definde_second_roi=True, - qz_start2=0.04, - qz_end2=0.050, - qz_num2=1, - gap_qz_num2=1, - qr_start2=0.002, - qr_end2=0.064, - qr_num2=10, - gap_qr_num2=5, - # qcenters = [ 0.00235,0.00379,0.00508,0.00636,0.00773, 0.00902] #in A-1 - # width = 0.0002 - qth_interest=1, # the intested single qth - use_sqnorm=False, - use_imgsum_norm=True, - pdf_version="_1", # for pdf report name - ) - - step = 1 - Nt = len(uid_averages) - for i in range(Nt): - t0 = time.time() - suf_idsi = ( - suf_ids[0][i * step : (i + 1) * step], - suf_ids[1][i * step : (i + 1) * step], - suf_ids[2][i * step : (i + 1) * step], - ) - XPCS_XSVS_SAXS_Multi( - 0, 0, run_pargs=run_pargs, suf_ids=suf_idsi, uid_average=uid_averages[i] - ) - - run_time(t0) +# if False: +# start_time, stop_time = ( +# "2016-12-1 16:30:00", +# "2016-12-1 16:31:50", +# ) # for 10 nm, 20, for test purpose +# suf_ids = find_uids(start_time, stop_time) +# sp = "test" +# uid_averages = [ +# sp + "_vs_test1_120116", +# sp + "_vs_test2_120116", +# sp + "_vs_test3_120116", +# ] + +# run_pargs = dict( +# scat_geometry="saxs", +# # scat_geometry = 'gi_saxs', +# force_compress=False, # True, #False, #True,#False, +# para_compress=True, +# run_fit_form=False, +# run_waterfall=True, # False, +# run_t_ROI_Inten=True, +# # run_fit_g2 = True, +# fit_g2_func="stretched", +# run_one_time=True, # False, +# run_two_time=True, # False, +# run_four_time=False, # True, #False, +# run_xsvs=True, +# att_pdf_report=True, +# show_plot=False, +# CYCLE="2016_3", +# # if scat_geometry == 'gi_saxs': +# # mask_path = '/XF11ID/analysis/2016_3/masks/', +# # mask_name = 'Nov16_4M-GiSAXS_mask.npy', +# # elif scat_geometry == 'saxs': +# 
mask_path="/XF11ID/analysis/2016_3/masks/", +# mask_name="Nov28_4M_SAXS_mask.npy", +# good_start=5, +# #####################################for saxs +# uniformq=True, +# inner_radius=0.005, # 0.005 for 50 nmAu/SiO2, 0.006, #for 10nm/coralpor +# outer_radius=0.04, # 0.04 for 50 nmAu/SiO2, 0.05, #for 10nm/coralpor +# num_rings=12, +# gap_ring_number=6, +# number_rings=1, +# ############################for gi_saxs +# # inc_x0 = 1473, +# # inc_y0 = 372, +# # refl_x0 = 1473, +# # refl_y0 = 730, +# qz_start=0.025, +# qz_end=0.04, +# qz_num=3, +# gap_qz_num=1, +# # qz_width = ( qz_end - qz_start)/(qz_num +1), +# qr_start=0.0025, +# qr_end=0.07, +# qr_num=14, +# gap_qr_num=5, +# define_second_roi=True, +# qz_start2=0.04, +# qz_end2=0.050, +# qz_num2=1, +# gap_qz_num2=1, +# qr_start2=0.002, +# qr_end2=0.064, +# qr_num2=10, +# gap_qr_num2=5, +# # qcenters = [ 0.00235,0.00379,0.00508,0.00636,0.00773, 0.00902] #in A-1 +# # width = 0.0002 +# qth_interest=1, # the interested single qth +# use_sqnorm=False, +# use_imgsum_norm=True, +# pdf_version="_1", # for pdf report name +# ) + +# step = 1 +# Nt = len(uid_averages) +# for i in range(Nt): +# t0 = time.time() +# suf_idsi = ( +# suf_ids[0][i * step : (i + 1) * step], +# suf_ids[1][i * step : (i + 1) * step], +# suf_ids[2][i * step : (i + 1) * step], +# ) +# XPCS_XSVS_SAXS_Multi( +# 0, 0, run_pargs=run_pargs, suf_ids=suf_idsi, uid_average=uid_averages[i] +# ) + +# run_time(t0) From 63630dfdc546ed5a6e0cdc589f3b53aece7cba59 Mon Sep 17 00:00:00 2001 From: jennmald Date: Wed, 15 Apr 2026 13:23:14 -0400 Subject: [PATCH 5/7] SAXS and Stitching --- pyCHX/SAXS.py | 34 +++++++++++++++++----------------- pyCHX/Stitching.py | 6 +++--- 2 files changed, 20 insertions(+), 20 deletions(-) diff --git a/pyCHX/SAXS.py b/pyCHX/SAXS.py index 189ef25..05a5e93 100644 --- a/pyCHX/SAXS.py +++ b/pyCHX/SAXS.py @@ -8,13 +8,12 @@ from lmfit import Model, Parameters, minimize from scipy.optimize import leastsq from scipy.special import gamma - +import 
skbeam.core.utils as utils from pyCHX.chx_generic_functions import find_index, plot1D, show_img # import matplotlib as mpl -# import matplotlib.pyplot as plt -# from matplotlib.colors import LogNorm -from pyCHX.chx_libs import * +import matplotlib.pyplot as plt +import numpy as np def mono_sphere_form_factor_intensity(x, radius, delta_rho=100, fit_func="G"): @@ -134,10 +133,10 @@ def poly_sphere_form_factor_intensity_q2( def find_index_old(x, x0, tolerance=None): # find the position of P in a list (plist) with tolerance - N = len(x) + _ = len(x) i = 0 position = None - if tolerance == None: + if tolerance is None: tolerance = (x[1] - x[0]) / 2.0 if x0 > max(x): position = len(x) - 1 @@ -543,7 +542,7 @@ def get_form_factor_fit2( for i in range(len(pfit)): try: error.append(np.absolute(pcov[i][i]) ** 0.5) - except: + except Exception: error.append(None) pfit_leastsq = pfit perr_leastsq = np.array(error) @@ -636,19 +635,19 @@ def get_form_factor_fit( # fit_power = 0 result = mod.fit(iq_ * q_**fit_power, pars, x=q_) # , fit_func=fit_func ) if function == "poly_sphere": - sigma = result.best_values["sigma"] + pass elif function == "mono_sphere": - sigma = 0 - r = result.best_values["radius"] + pass + _ = result.best_values["radius"] # scale = result.best_values['scale'] # baseline = result.best_values['baseline'] - delta_rho = result.best_values["delta_rho"] + _ = result.best_values["delta_rho"] print(result.best_values) return result, q_ def plot_form_factor_with_fit( - q, iq, q_, result, fit_power=0, res_pargs=None, return_fig=False, *argv, **kwargs + q, iq, q_, result, y, fit_power=0, res_pargs=None, return_fig=False, *argv, **kwargs ): if res_pargs is not None: uid = res_pargs["uid"] @@ -671,7 +670,7 @@ def plot_form_factor_with_fit( plt.title("uid= %s:--->" % uid + title_qr, fontsize=20, y=1.02) r = result.best_values["radius"] - delta_rho = result.best_values["delta_rho"] + _ = result.best_values["delta_rho"] sigma = result.best_values["sigma"] ax.semilogy(q, iq, 
"ro", label="Form Factor") @@ -768,6 +767,7 @@ def fit_form_factor2( q, iq, guess_values, + y, fit_range=None, fit_variables=None, res_pargs=None, @@ -961,10 +961,10 @@ def show_saxs_qmap( # center = [ center[1], center[0] ] #due to python conventions w = width - img_ = np.zeros([w, w]) - minW, maxW = min(center[0] - w, center[1] - w), max(center[0] - w, center[1] - w) + # img_ = np.zeros([w, w]) + minW, _ = min(center[0] - w, center[1] - w), max(center[0] - w, center[1] - w) if w < minW: - img_ = img[cx - w // 2 : cx + w // 2, cy + w // 2 : cy + w // 2] + _ = img[cx - w // 2 : cx + w // 2, cy + w // 2 : cy + w // 2] # elif w > maxW: # img_[ cx-w//2:cx+w//2, cy+w//2:cy+w//2 ] = @@ -1151,7 +1151,7 @@ def plot_fit_sphere_form_factor(q, pq, res, p0=None, xlim=None, ylim=None): ax.text(x=0.02, y=0.15, s=txts, fontsize=14, transform=ax.transAxes) -def exm_plot(): +def exm_plot(iq, ff, q): fig, ax = plt.subplots() ax.semilogy(q, iq, "ro", label="data") diff --git a/pyCHX/Stitching.py b/pyCHX/Stitching.py index 335346d..ec07366 100644 --- a/pyCHX/Stitching.py +++ b/pyCHX/Stitching.py @@ -81,7 +81,7 @@ def Correct_Overlap_Images_Intensities( from scipy.signal import savgol_filter as sf Return: data: array, stitched image with corrected intensity - dataM: dict, each value is the image with correted intensity + dataM: dict, each value is the image with correlated intensity scale: scale for each image, the first scale=1 by definition scale_smooth: smoothed scale @@ -214,7 +214,7 @@ def stitch_WAXS_in_Qspace( qx_max = q_range[1] qxs = np.arange(q_range[0], q_range[1], dq) qzs = np.arange(q_range[2], q_range[3], dq) - QXs, QZs = np.meshgrid(qxs, qzs) + _, _ = np.meshgrid(qxs, qzs) num_qx = len(qxs) qz_min = q_range[2] qz_max = q_range[3] @@ -226,7 +226,7 @@ def stitch_WAXS_in_Qspace( # Intensity_mapN = np.zeros( (8, len(qzs), len(qxs)) ) for i in range(len(phis)): dM = np.rot90(dataM[i].T) - D = dM.ravel() + _ = dM.ravel() phi = phis[i] calibration.set_angles( det_phi_g=phi, 
det_theta_g=0.0, offset_x=dx, offset_y=dy, offset_z=dz From 775c6dc89208f2ec3fdd4d874e50338e91674fb2 Mon Sep 17 00:00:00 2001 From: jennmald Date: Wed, 15 Apr 2026 13:28:17 -0400 Subject: [PATCH 6/7] movie maker --- pyCHX/movie_maker.py | 28 ++++++++++++++-------------- 1 file changed, 14 insertions(+), 14 deletions(-) diff --git a/pyCHX/movie_maker.py b/pyCHX/movie_maker.py index 87240e4..e2f78fd 100644 --- a/pyCHX/movie_maker.py +++ b/pyCHX/movie_maker.py @@ -13,7 +13,7 @@ def read_imgs(inDir): return Images(inDir) -def select_regoin( +def select_region( img, vert, keep_shape=True, @@ -33,12 +33,12 @@ def select_regoin( try: img_[ys:ye, xs:xe] = True - except: + except Exception: img_[ys:ye, xs:xe, :] = True pixellist_ = np.where(img_.ravel())[0] # pixellist_ = img_.ravel() if qmask is not None: - b = np.where(qmask.flatten() == False)[0] + b = np.where(~qmask.flatten())[0] pixellist_ = np.intersect1d(pixellist_, b) # imgx = img[pixellist_] # imgx = imgx.reshape( xe-xs, ye-ys) @@ -49,7 +49,7 @@ def select_regoin( else: try: imgx = img[ys:ye, xs:xe] - except: + except Exception: imgx = img[ys:ye, xs:xe, :] return imgx @@ -88,7 +88,7 @@ def save_png_series( save png files """ - if uid == None: + if uid is None: uid = "uid" num_frame = 0 for img in imgs: @@ -98,19 +98,19 @@ def save_png_series( ax.get_yaxis().set_visible(False) if ROI is None: i0 = img - asp = 1.0 + _ = 1.0 else: - i0 = select_regoin( + i0 = select_region( img, ROI, keep_shape=False, ) xs, xe, ys, ye = ROI - asp = (ye - ys) / float(xe - xs) + _ = (ye - ys) / float(xe - xs) ax.set_aspect("equal") if not logs: - im = ax.imshow( + _ = ax.imshow( i0, origin="lower", cmap=cmap, @@ -119,7 +119,7 @@ def save_png_series( vmax=vmax, ) # vmin=0,vmax=1, else: - im = ax.imshow( + _ = ax.imshow( i0, origin="lower", cmap=cmap, @@ -202,16 +202,16 @@ def movie_maker( if ROI is None: i0 = imgs[0] - asp = 1.0 + _ = 1.0 else: - i0 = select_regoin( + i0 = select_region( imgs[0], ROI, keep_shape=False, ) xs, xe, ys, ye 
= ROI - asp = (ye - ys) / float(xe - xs) + _ = (ye - ys) / float(xe - xs) ax.set_aspect("equal") # print( cmap, vmin, vmax ) @@ -245,7 +245,7 @@ def update_img(n): if ROI is None: ign = imgs[n] else: - ign = select_regoin( + ign = select_region( imgs[n], ROI, keep_shape=False, From 2c80d131becd32f53d288621fbfbf05622084224 Mon Sep 17 00:00:00 2001 From: jennmald Date: Wed, 15 Apr 2026 13:33:01 -0400 Subject: [PATCH 7/7] data gonio --- pyCHX/DataGonio.py | 61 +++++++++++++++++++++++----------------------- 1 file changed, 31 insertions(+), 30 deletions(-) diff --git a/pyCHX/DataGonio.py b/pyCHX/DataGonio.py index c9a49c1..acda20a 100644 --- a/pyCHX/DataGonio.py +++ b/pyCHX/DataGonio.py @@ -12,6 +12,7 @@ ) from pyCHX.chx_generic_functions import average_array_withNan +import h5py def convert_Qmap( @@ -246,7 +247,7 @@ def __init__(self, infile=None, format="auto"): def load(self, infile, format="auto", invert=False): """Loads a mask from a a file. If this object already has some masking defined, then the new mask is 'added' to it. Thus, one can load multiple - masks to exlude various pixels.""" + masks to exclude various pixels.""" if format == "png" or infile[-4:] == ".png": self.load_png(infile, invert=invert) @@ -258,14 +259,14 @@ def load(self, infile, format="auto", invert=False): print("Couldn't identify mask format for %s." % (infile)) def load_blank(self, width, height): - """Creates a null mask; i.e. one that doesn't exlude any pixels.""" + """Creates a null mask; i.e. one that doesn't exclude any pixels.""" # TODO: Confirm that this is the correct order for x and y. self.data = np.ones((height, width)) def load_png(self, infile, threshold=127, invert=False): """Load a mask from a PNG image file. 
High values (white) are included, - low values (black) are exluded.""" + low values (black) are excluded.""" # Image should be black (0) for excluded pixels, white (255) for included pixels img = PIL.Image.open(infile).convert("L") # black-and-white @@ -526,7 +527,7 @@ def _generate_qxyz_maps(self): x = np.arange(self.width) - self.x0 y = np.arange(self.height) - self.y0 X, Y = np.meshgrid(x, y) - R = np.sqrt(X**2 + Y**2) + # R = np.sqrt(X**2 + Y**2) # twotheta = np.arctan(self.r_map()*c) # radians theta_f = np.arctan2(X * c, 1) # radians @@ -722,7 +723,7 @@ def _generate_qxyz_maps_no_offset(self): ) qz_c = -1 * k_over_Dprime * (d * np.sin(theta_g) + Y_c * np.cos(theta_g)) - qr_c = np.sqrt(np.square(qx_c) + np.square(qy_c)) + # qr_c = np.sqrt(np.square(qx_c) + np.square(qy_c)) q_c = np.sqrt(np.square(qx_c) + np.square(qy_c) + np.square(qz_c)) # Conversion factor for pixel coordinates @@ -732,7 +733,7 @@ def _generate_qxyz_maps_no_offset(self): x = np.arange(self.width) - self.x0 y = np.arange(self.height) - self.y0 X, Y = np.meshgrid(x, y) - R = np.sqrt(X**2 + Y**2) + # R = np.sqrt(X**2 + Y**2) # twotheta = np.arctan(self.r_map()*c) # radians theta_f = np.arctan2(X * c, 1) # radians @@ -818,27 +819,27 @@ def _generate_qxyz_maps(self): self.q_map_data = q_c self.qr_map_data = qr_c - if False: # True: - # Conversion factor for pixel coordinates - # (where sample-detector distance is set to d = 1) - c = (self.pixel_size_um / 1e6) / self.distance_m - - x = np.arange(self.width) - self.x0 - y = np.arange(self.height) - self.y0 - X, Y = np.meshgrid(x, y) - R = np.sqrt(X**2 + Y**2) - - # twotheta = np.arctan(self.r_map()*c) # radians - theta_f = np.arctan2(X * c, 1) # radians - # alpha_f_prime = np.arctan2( Y*c, 1 ) # radians - alpha_f = np.arctan2(Y * c * np.cos(theta_f), 1) # radians - - self.qx_map_data1 = self.get_k() * np.sin(theta_f) * np.cos(alpha_f) - self.qy_map_data1 = self.get_k() * ( - np.cos(theta_f) * np.cos(alpha_f) - 1 - ) # TODO: Check sign - 
self.qz_map_data1 = -1.0 * self.get_k() * np.sin(alpha_f) - - self.qr_map_data1 = np.sign(self.qx_map_data1) * np.sqrt( - np.square(self.qx_map_data1) + np.square(self.qy_map_data1) - ) + # if False: # True: + # # Conversion factor for pixel coordinates + # # (where sample-detector distance is set to d = 1) + # c = (self.pixel_size_um / 1e6) / self.distance_m + + # x = np.arange(self.width) - self.x0 + # y = np.arange(self.height) - self.y0 + # X, Y = np.meshgrid(x, y) + # R = np.sqrt(X**2 + Y**2) + + # # twotheta = np.arctan(self.r_map()*c) # radians + # theta_f = np.arctan2(X * c, 1) # radians + # # alpha_f_prime = np.arctan2( Y*c, 1 ) # radians + # alpha_f = np.arctan2(Y * c * np.cos(theta_f), 1) # radians + + # self.qx_map_data1 = self.get_k() * np.sin(theta_f) * np.cos(alpha_f) + # self.qy_map_data1 = self.get_k() * ( + # np.cos(theta_f) * np.cos(alpha_f) - 1 + # ) # TODO: Check sign + # self.qz_map_data1 = -1.0 * self.get_k() * np.sin(alpha_f) + + # self.qr_map_data1 = np.sign(self.qx_map_data1) * np.sqrt( + # np.square(self.qx_map_data1) + np.square(self.qy_map_data1) + # )