diff --git a/.gitattributes b/.gitattributes deleted file mode 100644 index 198732e3..00000000 --- a/.gitattributes +++ /dev/null @@ -1 +0,0 @@ -qcelemental/_version.py export-subst diff --git a/.github/CONTRIBUTING.md b/.github/CONTRIBUTING.md deleted file mode 100644 index d73e7e15..00000000 --- a/.github/CONTRIBUTING.md +++ /dev/null @@ -1,57 +0,0 @@ -# QCElemental - -QCElemental is a project collecting fundamental tools for computational -molecular sciences (CMS) into a lightweight Python -interface. It is maintained by the Molecular Sciences Software -Institute (MolSSI) and contributed to by a number of community CMS code -developers and users. - -The project welcomes new contributions and comments! -Pull requests are tested and linted to ensure code integrity and quality. -New feature contributors are encouraged to first open a new issue or follow the -"chat on slack" badge on -[README.md](https://github.com/MolSSI/QCElemental/blob/master/README.md) -to the QCArchive workspace. - -# How to contribute - -We welcome contributions from external contributors, and this document -describes how to merge code changes into QCElemental. - -## Getting Started - -* Make sure you have a [GitHub account](https://github.com/signup/free). -* [Fork](https://help.github.com/articles/fork-a-repo/) this repository on GitHub. -* On your local machine, - [clone](https://help.github.com/articles/cloning-a-repository/) your fork of - the repository. - -## Making Changes - -* Add some really awesome code to your local fork. It's usually a [good - idea](http://blog.jasonmeridth.com/posts/do-not-issue-pull-requests-from-your-master-branch/) - to make changes on a - [branch](https://help.github.com/articles/creating-and-deleting-branches-within-your-repository/) - with the branch name relating to the feature you are going to add. 
-* When you are ready for others to examine and comment on your new feature, - navigate to your fork of QCElemental on GitHub and open a [pull - request](https://help.github.com/articles/using-pull-requests/) (PR). Note that - after you launch a PR from one of your fork's branches, all - subsequent commits to that branch will be added to the open pull request - automatically. Each commit added to the PR will be validated for - mergability, compilation and test suite compliance; the results of these tests - will be visible on the PR page. -* If you're providing a new feature, you must add test cases and documentation. -* When the code is ready to go, make sure you run the test suite using pytest. -* When you're ready to be considered for merging, check the "Ready to go" - box on the PR page to let the QCElemental devs know that the changes are complete. - The code will not be merged until this box is checked, the continuous - integration returns checkmarks, - and multiple core developers give "Approved" reviews. 
- -# Additional Resources - -* [General GitHub documentation](https://help.github.com/) -* [PR best practices](http://codeinthehole.com/writing/pull-requests-and-other-good-practices-for-teams-using-github/) -* [A guide to contributing to software packages](http://www.contribution-guide.org) -* [Thinkful PR example](http://www.thinkful.com/learn/github-pull-request-tutorial/#Time-to-Submit-Your-First-PR) diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md index 84f94c5a..793955db 100644 --- a/.github/PULL_REQUEST_TEMPLATE.md +++ b/.github/PULL_REQUEST_TEMPLATE.md @@ -7,6 +7,6 @@ ## Status - + - [ ] Code base linted - [ ] Ready to go diff --git a/.github/workflows/CI.yaml b/.github/workflows/CI.yaml new file mode 100644 index 00000000..e23b7ae5 --- /dev/null +++ b/.github/workflows/CI.yaml @@ -0,0 +1,61 @@ +name: CI + +on: + push: + branches: + - master + pull_request: + +jobs: + test: + runs-on: ubuntu-latest + strategy: + matrix: + python-version: ["3.7", "3.11"] + + steps: + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v4 + with: + python-version: ${{ matrix.python-version }} + - name: Checkout Code + uses: actions/checkout@v3 + - name: Install poetry + run: pip install poetry + - name: Install repo + run: poetry install --no-interaction --no-ansi + - name: Run tests + run: poetry run pytest -rws -v --cov=qcelemental --color=yes --cov-report=xml + - name: Upload coverage to Codecov + uses: codecov/codecov-action@v3 # NEEDS UPDATE TO v3 https://github.com/codecov/codecov-action + - name: QCSchema Examples Deploy + uses: JamesIves/github-pages-deploy-action@4.1.1 + if: github.event_name == 'push' && github.repository == 'MolSSI/QCElemental' && ( startsWith( github.ref, 'refs/tags/' ) || github.ref == 'refs/heads/master' ) + with: + branch: qcschema-examples + folder: qcelemental/tests/qcschema_instances + + build_documentation: + needs: [test] + runs-on: ubuntu-latest + strategy: + fail-fast: false + + 
steps: + - uses: actions/checkout@v3 + - uses: actions/setup-python@v4 + name: Set up Python + with: + python-version: "3.7" + - name: Install poetry + run: pip install poetry + - name: Install repo + run: poetry install --no-interaction --no-ansi + - name: Build Documentation + run: bash scripts/build_docs.sh + - name: GitHub Pages Deploy + uses: JamesIves/github-pages-deploy-action@4.1.1 + if: github.event_name == 'push' && github.repository == 'MolSSI/QCElemental' && ( startsWith( github.ref, 'refs/tags/' ) || github.ref == 'refs/heads/master' ) + with: + branch: gh-pages + folder: build/docs/ diff --git a/.github/workflows/CI.yml b/.github/workflows/CI.yml deleted file mode 100644 index 5718a9e8..00000000 --- a/.github/workflows/CI.yml +++ /dev/null @@ -1,123 +0,0 @@ -name: CI - -on: - push: - branches: - - master - pull_request: - -jobs: - build: - - runs-on: ubuntu-latest - strategy: - matrix: - cfg: - - conda-env: minimal - python-version: 3.6 - label: mindep - - conda-env: base - python-version: 3.6 - label: minpy - - conda-env: base-cf - python-version: "3.10" - label: full - env: - PYVER: ${{ matrix.cfg.python-version }} - CONDA_ENV: ${{ matrix.cfg.conda-env }} - - steps: - - uses: actions/checkout@v1 - - - name: Setup Information - shell: bash - run: | - uname -a - df -h - ulimit -a - conda --version - - - name: Create Environment - shell: bash - run: | - eval "$(conda shell.bash hook)" && conda activate - python devtools/scripts/create_conda_env.py -n=test -p=$PYVER devtools/conda-envs/$CONDA_ENV.yaml - - - name: Install - shell: bash - run: | - eval "$(conda shell.bash hook)" && conda activate test - python -m pip install . 
--no-deps - - - name: Environment Information - shell: bash - run: | - eval "$(conda shell.bash hook)" && conda activate test - conda list --show-channel-urls - - - name: PyTest - shell: bash - run: | - eval "$(conda shell.bash hook)" && conda activate test - pytest -rws -v --cov=qcelemental --color=yes --cov-report=xml qcelemental/ - - - name: PyTest Validate - shell: bash - if: matrix.cfg.label == 'full' - run: | - eval "$(conda shell.bash hook)" && conda activate test - pytest -rws -v --color=yes --validate qcelemental/ - - - name: QCSchema Examples Deploy - uses: JamesIves/github-pages-deploy-action@4.1.1 - if: matrix.cfg.label == 'full' && github.event_name == 'push' && github.repository == 'MolSSI/QCElemental' && ( startsWith( github.ref, 'refs/tags/' ) || github.ref == 'refs/heads/master' ) - with: - branch: qcschema-examples - folder: qcelemental/tests/qcschema_instances - - - name: CodeCov - uses: codecov/codecov-action@v1 - - release_sphinx: - needs: [build] - defaults: - run: - shell: bash -l {0} - strategy: - fail-fast: false - matrix: - cfg: - - conda-env: docs-cf - python-version: 3.8 - label: Sphinx - runs-on: ubuntu-latest - - name: "🐍 ${{ matrix.cfg.python-version }} • ${{ matrix.cfg.label }}" - runs-on: ${{ matrix.cfg.runs-on }} - - steps: - - uses: actions/checkout@v2 - - - name: Create Environment - uses: conda-incubator/setup-miniconda@v2 - with: - activate-environment: test - environment-file: devtools/conda-envs/${{ matrix.cfg.conda-env }}.yaml - python-version: ${{ matrix.cfg.python-version }} - auto-activate-base: false - - - name: Environment Information - run: | - conda info - conda list --show-channel-urls - - name: Build Documentation - run: | - python -m pip install . 
--no-deps - cd docs - make html - - name: GitHub Pages Deploy - uses: JamesIves/github-pages-deploy-action@4.1.1 - if: github.event_name == 'push' && github.repository == 'MolSSI/QCElemental' && ( startsWith( github.ref, 'refs/tags/' ) || github.ref == 'refs/heads/master' ) - with: - branch: gh-pages - folder: docs/build/html diff --git a/.github/workflows/Lint.yml b/.github/workflows/Lint.yml index c13fdbea..1871b4bd 100644 --- a/.github/workflows/Lint.yml +++ b/.github/workflows/Lint.yml @@ -7,27 +7,57 @@ on: pull_request: jobs: - build: + black: runs-on: ubuntu-latest - strategy: - matrix: - python-version: [3.7] - steps: - - uses: actions/checkout@v1 - - - name: Python Setup - uses: actions/setup-python@v1 - with: - python-version: ${{ matrix.python-version }} - - - name: Create Environment - shell: bash - run: | - python -m pip install --upgrade pip - python -m pip install black + - uses: actions/checkout@v3 + - name: Set up Python + uses: actions/setup-python@v4 + with: + python-version: "3.7" + - name: Install black + run: pip install black + - name: Check code formatting with black + run: black --check . - - name: Lint - shell: bash - run: black qcelemental --check + isort: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + - name: Set up Python + uses: actions/setup-python@v4 + with: + python-version: "3.7" + - name: Install poetry + run: pip install poetry + - name: Install repo + run: poetry install --no-interaction --no-ansi + - name: Check import formatting with isort + run: poetry run isort --check-only --diff . +# TODO: Support flake8 when the repo is ready :) +# flake8: +# runs-on: ubuntu-latest +# steps: +# - uses: actions/checkout@v3 +# - name: Set up Python +# uses: actions/setup-python@v4 +# with: +# python-version: "3.7" +# - name: Install flake8 +# run: pip install flake8 +# - name: Flake8 +# run: flake8 --count . 
+# TODO: Support mypy when the repo is ready +# mypy: +# runs-on: ubuntu-latest +# steps: +# - uses: actions/checkout@v3 +# - name: Set up Python +# uses: actions/setup-python@v4 +# with: +# python-version: "3.8" +# - name: Install repo +# run: pip install -e '.[lint]' +# - name: mypy +# run: mypy . diff --git a/.gitignore b/.gitignore index 45f83e57..3159c840 100644 --- a/.gitignore +++ b/.gitignore @@ -64,8 +64,7 @@ instance/ .scrapy # Sphinx documentation -docs/build/ -docs/source/api/ +docs/api/ # PyBuilder target/ @@ -120,6 +119,7 @@ runinfo/* .vscode/ raw_data/**/*_blob.py -# autogen +# QCElemental Specific ignores qcschema/*.schema qcelemental/tests/qcschema_instances/*/*.json +poetry.lock diff --git a/.lgtm.yml b/.lgtm.yml deleted file mode 100644 index ee6f5f05..00000000 --- a/.lgtm.yml +++ /dev/null @@ -1,17 +0,0 @@ -# Configure LGTM for this package - -extraction: - python: - python_setup: - version: 3 -path_classifiers: - library: - - versioneer.py # Set Versioneer.py to an external "library" (3rd party code) - - devtools/* - generated: - - nist_data/* - - qcelemental/_version.py -queries: -- exclude: py/not-named-self # Blocks Pydantic's @validator not accepting `self` until a better fix can be found -- exclude: py/missing-equals # Blocks Pydantic's equivalent decorator -- exclude: py/unsafe-cyclic-import # LGTM seems a bit pessimistic diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 00000000..37a79b5c --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,45 @@ +# See https://pre-commit.com for more information +# See https://pre-commit.com/hooks.html for more hooks +repos: + - repo: https://github.com/pre-commit/pre-commit-hooks + rev: v4.4.0 + hooks: + - id: check-yaml + - id: check-added-large-files + args: ["--maxkb=250"] + - repo: https://github.com/Yelp/detect-secrets + rev: v1.4.0 + hooks: + - id: detect-secrets + stages: [commit] + # TODO: Update to 5.12.x once we drop Python3.7 support + # 
https://levelup.gitconnected.com/fix-runtimeerror-poetry-isort-5db7c67b60ff + - repo: https://github.com/PyCQA/isort + rev: 5.11.5 + hooks: + - id: isort + - repo: https://github.com/psf/black + rev: 23.1.0 + hooks: + - id: black + # TODO: Support flake8 when the repo is ready + # - repo: https://github.com/PyCQA/flake8 + # rev: 5.0.4 + # hooks: + # - id: flake8 + # TODO: Support mypy when the repo is ready + # - repo: https://github.com/pre-commit/mirrors-mypy + # rev: v1.1.1 + # hooks: + # - id: mypy + # additional_dependencies: + # [tokenize-rt==3.2.0, pydantic>=1.0.0] + - repo: local + hooks: + - id: tests + name: tests + stages: [push] + language: system + entry: bash scripts/test.sh + types: [python] + pass_filenames: false diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md new file mode 100644 index 00000000..d0641c35 --- /dev/null +++ b/CONTRIBUTING.md @@ -0,0 +1,100 @@ +# QCElemental + +QCElemental is a project collecting fundamental tools for computational molecular sciences (CMS) into a lightweight Python interface. It is maintained by the Molecular Sciences Software Institute (MolSSI) and contributed to by a number of community CMS code developers and users. + +The project welcomes new contributions and comments! Pull requests are tested and linted to ensure code integrity and quality. New feature contributors are encouraged to first open a new issue or follow the "chat on slack" badge on [README.md](https://github.com/MolSSI/QCElemental/blob/master/README.md) to the QCArchive workspace. + +# How to contribute + +We welcome contributions from external contributors, and this document describes how to submit code changes to QCElemental. + +- Make sure you have a [GitHub account](https://github.com/signup/free). +- [Fork](https://help.github.com/articles/fork-a-repo/) this repository on GitHub by clicking "Fork" on the top of the [GitHub repo](https://github.com/MolSSI/QCElemental). 
+- On your local machine, + [clone](https://help.github.com/articles/cloning-a-repository/) your fork of QCElemental. + ```sh + git clone https://github.com/{YOUR-GITHUB-USERNAME}/QCElemental.git + cd QCElemental + ``` +- Install [poetry](https://python-poetry.org/) if you do not have it on your system. Poetry will manage package dependencies and virtual environments for you. + ```sh + curl -sSL https://install.python-poetry.org | python3 - + ``` +- Install QCElemental. + + ```sh + poetry install + ``` + +- Activate your new virtual environment. Many editors--like VS Code--will do this for you automatically when you open a directory that has been installed with `poetry`. + + ```sh + poetry shell + ``` + +- Check your installation by running the tests. + + ```sh + bash scripts/test.sh + ``` + +- Look at the code coverage by opening the newly created `htmlcov/index.html` in your browser. This can help you evaluate the test coverage of new code that you add. + + ```sh + open htmlcov/index.html + ``` + +- Create a new branch for your work beginning with the word `feature-`: + + ```sh + git checkout -b feature-my-cool-feature-name + ``` + +- Install pre-commit hooks to have your code automatically formatted and linted when running `git commit`. If linting was required you'll need to run `git add .` again to stage the newly linted files and then try your commit again. Tests will run when you execute `git push`. If tests don't pass, the code will not push. Fix your tests/code, then commit and push again. + + ```sh + pre-commit install + pre-commit install --hook-type pre-push + ``` + +- If you ever need to commit or push without running the hooks add `--no-verify` to your command, i.e., + + ```sh + git commit --no-verify -m 'My commit message.' + ``` + +- Make changes to the code and commit your changes using git. You can lint your code (make sure it adheres to our code guidelines by standardizing code format, import ordering, spacing, etc.) 
without needing to deal with these details yourself by running: + + ```sh + bash scripts/format.sh + ``` + +- If you're providing a new feature, you must add test cases and documentation. + +- Push to your repo. When you are ready to submit your changes open a [Pull Request](https://github.com/MolSSI/QCElemental/pulls) on the MolSSI/QCElemental repo from your fork into the QCElemental `master` branch. When you're ready to be considered for merging, check the "Ready to go" box on the PR page to let the QCElemental developers know that the changes are complete. The code will not be merged until this box is checked, the continuous integration returns check marks, and multiple core developers give "Approved" reviews. + +## Building Docs and Packaging for Distribution + +- Build Docs: + + ```sh + bash scripts/build_docs.sh + ``` + +- Build packages for distribution. Build artifacts will be in `dist/`: + + ```sh + poetry build + ``` + +- Distribute built packages to PyPi: + ```sh + poetry publish --username {pypi_username} --password {pypi_password} + ``` + +## Additional Resources + +- [General GitHub documentation](https://help.github.com/) +- [PR best practices](http://codeinthehole.com/writing/pull-requests-and-other-good-practices-for-teams-using-github/) +- [A guide to contributing to software packages](http://www.contribution-guide.org) +- [Thinkful PR example](http://www.thinkful.com/learn/github-pull-request-tutorial/#Time-to-Submit-Your-First-PR) diff --git a/MANIFEST.in b/MANIFEST.in deleted file mode 100644 index e933ff98..00000000 --- a/MANIFEST.in +++ /dev/null @@ -1,10 +0,0 @@ -recursive-include qcelemental *.py *.json *.md dummy -recursive-exclude qcelemental/checkup_data *.py *.md - -include setup.py -include README.md -include LICENSE -include MANIFEST.in - -include versioneer.py -include qcelemental/_version.py diff --git a/Makefile b/Makefile deleted file mode 100644 index 0135c37c..00000000 --- a/Makefile +++ /dev/null @@ -1,68 +0,0 @@ -.DEFAULT_GOAL 
:= all -isort = isort --float-to-top qcelemental -black = black qcelemental -autoflake = autoflake -ir --remove-all-unused-imports --ignore-init-module-imports --remove-unused-variables qcelemental - -.PHONY: install -install: - pip install -e . - -.PHONY: format -format: -# $(autoflake) - $(isort) - $(black) - -.PHONY: lint -lint: - $(isort) --check-only - $(black) --check - -.PHONY: check-dist -check-dist: - python setup.py check -ms - python setup.py sdist - twine check dist/* - -.PHONY: mypy -mypy: - mypy qcelemental - -.PHONY: test -test: - pytest -v --cov=qcelemental/ - -.PHONY: data -data: cpu_data - #(cd devtools/scripts; python build_periodic_table.py; mv nist_*_atomic_weights.py ../../qcelemental/data/) - #(cd devtools/scripts; python build_physical_constants_2014.py; mv nist_*_codata.py ../../qcelemental/data/) - (cd raw_data/dft_data; python build_dft_info.py; mv dft_data_blob.py ../../qcelemental/info/data/) - (cd devtools/scripts; python build_physical_constants_2018.py; mv nist_*_codata.py ../../qcelemental/data/) - -.PHONY: cpu_data -cpu_data: - (cd raw_data/cpu_data; python build_cpu_data.py; mv cpu_data_blob.py ../../qcelemental/info/data/) - -.PHONY: qcschema -qcschema: - mkdir -p qcschema - python -c "exec(\"import pathlib, qcelemental\nfor md in qcelemental.models.qcschema_models():\n\tmfile = (pathlib.Path('qcschema') / md.__name__).with_suffix('.schema')\n\twith open(mfile, 'w') as fp:\n\t\tfp.write(md.schema_json(indent=None))\")" - python -c "exec(\"import json, pathlib, pydantic, qcelemental\nwith open((pathlib.Path('qcschema') / 'QCSchema').with_suffix('.schema'), 'w') as fp:\n\tjson.dump(pydantic.schema.schema(qcelemental.models.qcschema_models(), title='QCSchema'), fp, indent=4)\")" - -.PHONY: clean -clean: - rm -rf `find . -name __pycache__` - rm -f `find . -type f -name '*.py[co]' ` - rm -f `find . -type f -name '*~' ` - rm -f `find . 
-type f -name '.*~' ` - rm -rf .cache - rm -rf .pytest_cache - rm -rf .mypy_cache - rm -rf htmlcov - rm -rf *.egg-info - rm -f .coverage - rm -f .coverage.* - rm -rf build - rm -rf dist - rm -f qcelemental/*.c qcelemental/*.so - python setup.py clean diff --git a/README.md b/README.md index 10415b60..eea589d1 100644 --- a/README.md +++ b/README.md @@ -2,24 +2,45 @@ [![Build Status](https://github.com/MolSSI/QCElemental/workflows/CI/badge.svg?branch=master)](https://github.com/MolSSI/QCElemental/actions?query=workflow%3ACI) [![codecov](https://img.shields.io/codecov/c/github/MolSSI/QCElemental.svg?logo=Codecov&logoColor=white)](https://codecov.io/gh/MolSSI/QCElemental) -[![Language grade: Python](https://img.shields.io/lgtm/grade/python/g/MolSSI/QCElemental.svg?logo=lgtm&logoWidth=18)](https://lgtm.com/projects/g/MolSSI/QCElemental/context:python) [![Documentation Status](https://img.shields.io/github/workflow/status/MolSSI/QCElemental/CI/master?label=docs&logo=readthedocs&logoColor=white)](http://docs.qcarchive.molssi.org/projects/qcelemental/en/latest/) [![Chat on Slack](https://img.shields.io/badge/chat-on_slack-green.svg?longCache=true&style=flat&logo=slack)](https://join.slack.com/t/qcarchive/shared_invite/enQtNDIzNTQ2OTExODk0LTE3MWI0YzBjNzVhNzczNDM0ZTA5MmQ1ODcxYTc0YTA1ZDQ2MTk1NDhlMjhjMmQ0YWYwOGMzYzJkZTM2NDlmOGM) -![python](https://img.shields.io/badge/python-3.6+-blue.svg) +![python](https://img.shields.io/badge/python-3.7+-blue.svg) -QCElemental is a resource module for quantum chemistry containing physical -constants and periodic table data from NIST and molecule handlers. +**Documentation:** [Read The Docs](http://docs.qcarchive.molssi.org/projects/qcelemental/en/latest/index.html) -Periodic Table and Physical Constants data are pulled from NIST srd144 and -srd121, respectively ([details](raw_data/README.md)) in a renewable manner -(class around NIST-published JSON file). +Core data structures for Quantum Chemistry. 
QCElemental also contains physical constants and periodic table data from NIST and molecule handlers. -Periodic Table and Physical Constants data are pulled from NIST srd144 and -srd121, respectively ([details](raw_data/README.md)) in a renewable manner -(class around NIST-published JSON file). +Periodic Table and Physical Constants data are pulled from NIST srd144 and srd121, respectively ([details](raw_data/README.md)) in a renewable manner (class around NIST-published JSON file). -This project also contains a generator, validator, and translator for [Molecule -QCSchema](https://molssi-qc-schema.readthedocs.io/en/latest/auto_topology.html). -It is intended to keep the QCElemental code compatible with Python 3.6+ -as long as dependencies allow. Packages are assured for Python 3.8+. +This project also contains a generator, validator, and translator for [Molecule QCSchema](https://molssi-qc-schema.readthedocs.io/en/latest/auto_topology.html). +## ✨ Getting Started + +- Installation. QCElemental supports Python 3.7+. + + ```sh + python -m pip install qcelemental + ``` + +- To install QCElemental with molecule visualization capabilities (useful in iPython or Jupyter notebook environments): + + ```sh + python -m pip install 'qcelemental[viz]' + ``` + +- To install QCElemental with various alignment capabilities using `networkx` + + ```sh + python -m pip install 'qcelemental[align]' + ``` + +- Or install both: + + ```sh + python -m pip install 'qcelemental[viz,align]' + ``` + +- See [documentation](http://docs.qcarchive.molssi.org/projects/qcelemental/en/latest/index.html) ### Periodic Table @@ -77,6 +98,7 @@ conversion factors can be obtained: ### Covalent Radii Covalent radii are accessible for most of the periodic table from [Alvarez, Dalton Transactions (2008) doi:10.1039/b801115j](https://doi.org/10.1039/b801115j) ([details](qcelemental/data/alvarez_2008_covalent_radii.py.py)). + ```python >>> import qcelemental as qcel >>> qcel.covalentradii.get('I') @@ -96,6 +118,7 @@ qcelemental.exceptions.DataUnavailableError: ('covalent radius', 'Lv') ### van der Waals Radii Van der Waals radii are accessible for tmost of the periodic table from [Mantina, J. Phys. 
Chem. A (2009) doi: 10.1021/jp8111556](https://pubs.acs.org/doi/10.1021/jp8111556) ([details](qcelemental/data/mantina_2009_vanderwaals_radii.py)). + ```python >>> import qcelemental as qcel >>> qcel.vdwradii.get('I') diff --git a/.codecov.yml b/codecov.yml similarity index 92% rename from .codecov.yml rename to codecov.yml index 6fd293e8..3d16704d 100644 --- a/.codecov.yml +++ b/codecov.yml @@ -1,6 +1,6 @@ coverage: ignore: - - */tests/* + - "*/tests/*" - qcelemental/_version.py - setup.py status: diff --git a/devtools/README.md b/devtools/README.md deleted file mode 100644 index 3ee530aa..00000000 --- a/devtools/README.md +++ /dev/null @@ -1,41 +0,0 @@ -# Development, testing, and deployment tools - -This directory contains a collection of tools for running Continuous Integration (CI) tests, -conda installation, and other development tools not directly related to the coding process. - - -## Manifest - -### Continuous Integration - -GitHub Actions (GHA) is used to run the tests on the minimum and maximum supported Python versions. -A formatting check is also run through GHA. It can be run locally on Linux or Mac with `make format`. - -## How to contribute changes -- Clone the repository if you have write access to the main repo, fork the repository if you are a collaborator. -- Make a new branch with `git checkout -b {your branch name}` -- Make changes and test your code -- Push the branch to the repo (either the main or your fork) with `git push -u origin {your branch name}` - * Note that `origin` is the default name assigned to the remote, yours may be different -- Make a PR on GitHub with your changes -- We'll review the changes and get your code into the repo after lively discussion! 
- - -## Checklist for updates -- [ ] Make sure there is an/are issue(s) opened for your specific update -- [ ] Create the PR, referencing the issue -- [ ] Debug the PR as needed until tests pass -- [ ] Tag the final, debugged version - * `git tag -a X.Y.Z [latest pushed commit] && git push --follow-tags` -- [ ] Get the PR merged in - -## Versioneer Auto-version -[Versioneer](https://github.com/warner/python-versioneer) will automatically infer what version -is installed by looking at the `git` tags and how many commits ahead this version is. The format follows -[PEP 440](https://www.python.org/dev/peps/pep-0440/) and has the regular expression of: -```regexp -\d+.\d+.\d+(?\+\d+-[a-z0-9]+) -``` -If the version of this commit is the same as a `git` tag, the installed version is the same as the tag, -e.g. `qcfractal-0.1.2`, otherwise it will be appended with `+X` where `X` is the number of commits -ahead from the last tag, and then `-YYYYYY` where the `Y`'s are replaced with the `git` commit hash. diff --git a/devtools/conda-envs/README.md b/devtools/conda-envs/README.md deleted file mode 100644 index 7847721a..00000000 --- a/devtools/conda-envs/README.md +++ /dev/null @@ -1,41 +0,0 @@ -# QCElemental Pre-built Conda Environments - -The QCElemental program has few requirements on its own `meta.yaml` file, however, -you may want to emulate the server side of things on your own. To help make that -possible, we have provided the various YAML files here which can be used -to quickly and mostly automatically build a working environment for to emulate -the server. - -These use the `conda env create` commands (examples below) instead of the -more common `conda create` (the commands are slightly different as of writing, -circa Conda 4.3), so note the difference in commands. - -* `base.yaml` is environment specification for general use. -* `base-cf.yaml` is the conda-forge-only version of `base.yaml`. -* `minimal.yaml` is primarily for CI canary testing. 
It specifies - minimal dependency set and pins to what we think are the minimal versions. - -## Requirements to use Environments - -1. `git` -2. `conda` -3. `conda` installed `pip` (pretty much always available unless you are in - some custom Python-less Conda environment such as an `R`-based env.) -4. Network access - -## Setup/Install - -Run the following command to configure a new environment with the replacements: - -* `{name}`: Replace with whatever you want to call the new env -* `{file}`: Replace with target file - -```bash -conda env create -n {name} -f {file} -``` - -To access the new environment: -```bash -conda activate {name} -``` - diff --git a/devtools/conda-envs/base-cf.yaml b/devtools/conda-envs/base-cf.yaml deleted file mode 100644 index 896306a2..00000000 --- a/devtools/conda-envs/base-cf.yaml +++ /dev/null @@ -1,22 +0,0 @@ -name: test -channels: - - conda-forge -dependencies: - # Base depends - - numpy>=1.12.0 - - nomkl - - python - - pint>=0.10.0 - - pydantic>=1.8.2 - - # Optional depends - - msgpack-python - - networkx>=2.4.0 - - nglview - - # Testing - - pytest - - pytest-cov - - codecov - - scipy # tests an aspect of a helper fn not used by qcel functionality - - jsonschema diff --git a/devtools/conda-envs/base.yaml b/devtools/conda-envs/base.yaml deleted file mode 100644 index 6ffadfad..00000000 --- a/devtools/conda-envs/base.yaml +++ /dev/null @@ -1,24 +0,0 @@ -name: test -channels: - - defaults - - conda-forge -dependencies: - # Base depends - - numpy>=1.12.0 - - nomkl - - python - - pint>=0.10.0 - - pydantic>=1.8.2 - - dataclasses # only for py36 and only b/c default channel pydantic missing the conditional dep - - # Optional depends - - msgpack-python - - networkx>=2.4.0 - - nglview - - # Testing - - pytest - - pytest-cov - - codecov - - scipy # tests an aspect of a helper fn not used by qcel functionality - - jsonschema diff --git a/devtools/conda-envs/docs-cf.yaml b/devtools/conda-envs/docs-cf.yaml deleted file mode 100644 index 
72ec0166..00000000 --- a/devtools/conda-envs/docs-cf.yaml +++ /dev/null @@ -1,29 +0,0 @@ -name: docs -channels: - - conda-forge - - nodefaults -dependencies: - - python - - networkx - - pydantic - - numpy - - pint - - pip - - # qc - - # docs - - python-graphviz - - sphinx >=3.5 - - sphinx-autodoc-typehints - - sphinx-automodapi - - sphinx_rtd_theme - - autodoc-pydantic - - # testing - - pytest - - pytest-cov - - codecov - - #- pip: - # - git+https://github.com/MolSSI/qcarchive-sphinx-theme#egg=qcarchive_sphinx_theme diff --git a/devtools/conda-envs/minimal.yaml b/devtools/conda-envs/minimal.yaml deleted file mode 100644 index 73a78f42..00000000 --- a/devtools/conda-envs/minimal.yaml +++ /dev/null @@ -1,18 +0,0 @@ -name: test -channels: - - defaults - - conda-forge -dependencies: - # Base depends - - numpy=1.14 # technically, pint has an optional >=1.12.0 numpy dep but c-f doesn't have py38 builds for it - - nomkl - - python - - pint=0.10.0 # technically, qcel has no lower bound for pint version for py36,37 but needs 0.10 for 38 - - pydantic=1.8.2 - - dataclasses # until drop py36 - - # Testing - - pytest=4.6.4 # technically, qcel works with 4.0.0 but c-f doesn't have py38 builds for it - - pytest-cov - - codecov - - jsonschema diff --git a/devtools/scripts/build_physical_constants_2014.py b/devtools/scripts/build_physical_constants_2014.py index f53dffda..772d598f 100644 --- a/devtools/scripts/build_physical_constants_2014.py +++ b/devtools/scripts/build_physical_constants_2014.py @@ -2,8 +2,9 @@ This file will generate a JSON blob usable by QCElemental for physical constants """ -import json import datetime +import json + import requests from yapf.yapflib.yapf_api import FormatCode @@ -13,9 +14,9 @@ title = metadata["title"] date_modified = metadata["modified"] -year = date_modified.split('-')[0] -doi = metadata['distribution'][-1]['accessURL'].strip('https://dx.doi.org/') -url = metadata['distribution'][0]['downloadURL'] +year = date_modified.split("-")[0] +doi = 
metadata["distribution"][-1]["accessURL"].strip("https://dx.doi.org/") +url = metadata["distribution"][0]["downloadURL"] access_date = str(datetime.datetime.utcnow()) constants = requests.get(url).json() @@ -33,7 +34,9 @@ """ -'''.format(year, title, date_modified, doi, url, access_date) +'''.format( + year, title, date_modified, doi, url, access_date +) constants_json = { "title": title, @@ -41,20 +44,20 @@ "doi": doi, "url": url, "access_data": access_date, - "constants": {} + "constants": {}, } -for pc in constants['constant']: - value = pc['Value'].strip() - uncertainty = pc['Uncertainty'] - if uncertainty == '(exact)': - value = value.replace('...', '') +for pc in constants["constant"]: + value = pc["Value"].strip() + uncertainty = pc["Uncertainty"] + if uncertainty == "(exact)": + value = value.replace("...", "") constants_json["constants"][pc["Quantity "].lower()] = { "quantity": pc["Quantity "], "unit": pc["Unit"], "value": value.replace(" ", ""), - 'uncertainty': uncertainty + "uncertainty": uncertainty, } output += "nist_{}_codata = {}".format(year, constants_json) diff --git a/devtools/scripts/build_physical_constants_2018.py b/devtools/scripts/build_physical_constants_2018.py index 349260e6..9f553216 100644 --- a/devtools/scripts/build_physical_constants_2018.py +++ b/devtools/scripts/build_physical_constants_2018.py @@ -2,14 +2,12 @@ This file will generate a JSON blob usable by QCElemental for physical constants """ -import os -import math -import json import datetime -import requests -import pandas as pd -import black +import os +import black +import pandas as pd +import requests table_url = "https://physics.nist.gov/cuu/Constants/Table/allascii.txt" @@ -62,7 +60,13 @@ constants_json["constants"][pc["Quantity"].lower()] = { "quantity": pc["Quantity"], - "unit": str(pc["Unit"]).replace("nan", "").replace("^-1", "^{-1}").replace("^-2", "^{-2}").replace("^-3", "^{-3}").replace("^-4", "^{-4}").replace("_90", "_{90}"), + "unit": str(pc["Unit"]) + 
.replace("nan", "") + .replace("^-1", "^{-1}") + .replace("^-2", "^{-2}") + .replace("^-3", "^{-3}") + .replace("^-4", "^{-4}") + .replace("_90", "_{90}"), "value": value.replace(" ", ""), "uncertainty": uncertainty, } diff --git a/devtools/scripts/create_conda_env.py b/devtools/scripts/create_conda_env.py index 53332f0f..8cfe8641 100644 --- a/devtools/scripts/create_conda_env.py +++ b/devtools/scripts/create_conda_env.py @@ -4,10 +4,10 @@ import subprocess as sp # Args -parser = argparse.ArgumentParser(description='Creates a conda environment from file for a given Python version.') -parser.add_argument('-n', '--name', type=str, nargs=1, help='The name of the created Python environment') -parser.add_argument('-p', '--python', type=str, nargs=1, help='The version of the created Python environment') -parser.add_argument('conda_file', nargs='*', help='The file for the created Python environment') +parser = argparse.ArgumentParser(description="Creates a conda environment from file for a given Python version.") +parser.add_argument("-n", "--name", type=str, nargs=1, help="The name of the created Python environment") +parser.add_argument("-p", "--python", type=str, nargs=1, help="The version of the created Python environment") +parser.add_argument("conda_file", nargs="*", help="The file for the created Python environment") args = parser.parse_args() diff --git a/devtools/travis-ci/before_install.sh b/devtools/travis-ci/before_install.sh deleted file mode 100755 index 283a7d2e..00000000 --- a/devtools/travis-ci/before_install.sh +++ /dev/null @@ -1,32 +0,0 @@ -# Temporarily change directory to $HOME to install software -pushd . 
-cd $HOME - -# Install Miniconda -if [ "$TRAVIS_OS_NAME" == "osx" ]; then - # Make OSX md5 mimic md5sum from linux, alias does not work - md5sum () { - command md5 -r "$@" - } - MINICONDA=Miniconda3-latest-MacOSX-x86_64.sh -else - MINICONDA=Miniconda3-latest-Linux-x86_64.sh -fi -MINICONDA_HOME=$HOME/miniconda -MINICONDA_MD5=$(wget -qO- https://repo.anaconda.com/miniconda/ | grep -A3 $MINICONDA | sed -n '4p' | sed -n 's/ *\(.*\)<\/td> */\1/p') -wget -q https://repo.anaconda.com/miniconda/$MINICONDA -if [[ $MINICONDA_MD5 != $(md5sum $MINICONDA | cut -d ' ' -f 1) ]]; then - echo "Miniconda MD5 mismatch" - exit 1 -fi -bash $MINICONDA -b -p $MINICONDA_HOME - -# Configure miniconda -export PIP_ARGS="-U" -export PATH=$MINICONDA_HOME/bin:$PATH - -conda config --set always_yes yes --set changeps1 no -conda update --q conda - -# Restore original directory -popd diff --git a/docs/Makefile b/docs/Makefile deleted file mode 100644 index 31460e7a..00000000 --- a/docs/Makefile +++ /dev/null @@ -1,21 +0,0 @@ -# Minimal makefile for Sphinx documentation -# - -# You can set these variables from the command line. -#SPHINXOPTS = -n -W --keep-going -SPHINXOPTS = -SPHINXBUILD = sphinx-build -SPHINXPROJ = qcelemental -SOURCEDIR = source -BUILDDIR = build - -# Put it first so that "make" without argument is like "make help". -help: - @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) - -.PHONY: help Makefile - -# Catch-all target: route all unknown targets to Sphinx using the new -# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). -%: Makefile - @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) diff --git a/docs/README.md b/docs/README.md deleted file mode 100644 index 7188a1f4..00000000 --- a/docs/README.md +++ /dev/null @@ -1,15 +0,0 @@ -# Compiling QCFractal's Documentation - -The docs for this project are built with Sphinx. To compile the docs, first ensure that Sphinx and the ReadTheDocs theme are installed. 
- -``` -conda install sphinx sphinx_rtd_theme -``` - -Once installed, you can use the Makefile in this directory to compile static HTML pages by - -``` -make html -``` - -The compiled docs will be in the _build directory and can be viewed by opening index.html (which may itself be inside a directory called html/ depending on what version of Sphinx is installed). diff --git a/docs/source/_templates/layout.html b/docs/_templates/layout.html similarity index 100% rename from docs/source/_templates/layout.html rename to docs/_templates/layout.html diff --git a/docs/source/api.rst b/docs/api.rst similarity index 100% rename from docs/source/api.rst rename to docs/api.rst diff --git a/docs/source/changelog.rst b/docs/changelog.rst similarity index 94% rename from docs/source/changelog.rst rename to docs/changelog.rst index abe2905c..0322677f 100644 --- a/docs/source/changelog.rst +++ b/docs/changelog.rst @@ -16,6 +16,31 @@ Changelog .. Bug Fixes .. +++++++++ +Unreleased +------------------- + +Breaking Changes +++++++++++++++++ + +- (:pr:`308`) Fix CI Pipelines. Dropped Python3.6. Bring CI pipelines into harmony with local dev experience. Lint and format entire code base. Accelerate CI pipelines. Update setup.py to correctly define extras packages. Breaking change due to dropped support for Python3.6. No code functionality was altered. + - Dropped support for dead Python 3.6. Minimum supported Python is now 3.7. + - Updated CONTRIBUTING.md to contain detailed instructions for developers on how to contribute. + - Fixed broken code that failed to prepend the "v" to version numbers. + - Updated CI to run without conda and using only packages defined in setup.py. CI is now much faster and runs the same way for local developers and GitHub Actions. + - Added test.sh and format.sh to devtools/scripts for easy execution of formatting and testing. + - Formatted all code with black. Sorted imports with isort. 
+ - Added pre-commit to repo so code formatting, linting, and testing will all run as part of regular git workflow. + +Enhancements +++++++++++++ +- (:pr:`310`) Modernize DevOps Tooling + - Added `/scripts` directory to root of project that contains scripts for testing, formatting code, and building docs. + - Updated build system from `setuptools` to modern `pyproject.toml` specification using `poetry` for the build backend. + - Removed complicated versioning code in favor of single source of truth in `pyproject.toml`. Using standard library `importlib` for looking up package version in `__init__.py` file. + - Added `build_docs.sh` script to `/scripts` and removed `Makefile` from `/docs`. Flattened `/docs` file structure. + - Removed `travis-ci` code from `devtools` + - Removed LGTM code (they no longer exist as a project). + - Bring all package directories under `black`, `isort`, and `autoflake` control. 0.25.1 / 2022-10-31 ------------------- diff --git a/docs/source/conf.py b/docs/conf.py similarity index 72% rename from docs/source/conf.py rename to docs/conf.py index ea7fdf9d..d420f73a 100644 --- a/docs/source/conf.py +++ b/docs/conf.py @@ -16,15 +16,14 @@ import os import sys -sys.path.insert(0, os.path.abspath('../../')) +sys.path.insert(0, os.path.abspath("../../")) import qcelemental - # -- Project information ----------------------------------------------------- -project = 'QCElemental' -copyright = f'2018-{datetime.datetime.today().year}, The Molecular Sciences Software Institute' -author = 'The QCArchive Development Team' +project = "QCElemental" +copyright = f"2018-{datetime.datetime.today().year}, The Molecular Sciences Software Institute" +author = "The QCArchive Development Team" # The short X.Y version version = qcelemental.__version__ @@ -43,28 +42,28 @@ # ones.
extensions = [ # from Sphinx - 'sphinx.ext.autodoc', - 'sphinx.ext.intersphinx', - 'sphinx.ext.todo', - 'sphinx.ext.coverage', - 'sphinx.ext.mathjax', - 'sphinx.ext.viewcode', - 'sphinx.ext.extlinks', - 'sphinx.ext.graphviz', - 'sphinx.ext.autosummary', - 'sphinx.ext.napoleon', + "sphinx.ext.autodoc", + "sphinx.ext.intersphinx", + "sphinx.ext.todo", + "sphinx.ext.coverage", + "sphinx.ext.mathjax", + "sphinx.ext.viewcode", + "sphinx.ext.extlinks", + "sphinx.ext.graphviz", + "sphinx.ext.autosummary", + "sphinx.ext.napoleon", # from Astropy - 'sphinx_automodapi.automodapi', - 'sphinx_automodapi.automodsumm', - 'sphinx_automodapi.smart_resolver', + "sphinx_automodapi.automodapi", + "sphinx_automodapi.automodsumm", + "sphinx_automodapi.smart_resolver", "sphinx_autodoc_typehints", "sphinxcontrib.autodoc_pydantic", ] autosummary_generate = True -automodapi_toctreedirnm = 'api' -#numpydoc_show_class_members = False -#automodsumm_inherited_members = True +automodapi_toctreedirnm = "api" +# numpydoc_show_class_members = False +# automodsumm_inherited_members = True autodoc_typehints = "description" napoleon_use_param = True napoleon_use_rtype = True @@ -73,16 +72,16 @@ autodoc_pydantic_field_swap_name_and_alias = True # Add any paths that contain templates here, relative to this directory. -templates_path = ['_templates'] +templates_path = ["_templates"] # The suffix(es) of source filenames. # You can specify multiple suffix as a list of string: # # source_suffix = ['.rst', '.md'] -source_suffix = '.rst' +source_suffix = ".rst" # The master toctree document. -master_doc = 'index' +master_doc = "index" # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. @@ -97,7 +96,7 @@ exclude_patterns = [] # The name of the Pygments (syntax highlighting) style to use. 
-pygments_style = 'default' +pygments_style = "default" # -- Options for HTML output ------------------------------------------------- @@ -105,7 +104,7 @@ # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. # -html_theme = 'sphinx_rtd_theme' +html_theme = "sphinx_rtd_theme" # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the @@ -116,7 +115,7 @@ # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ['_static'] +html_static_path = ["_static"] # Custom sidebar templates, must be a dictionary that maps document names # to template names. @@ -132,7 +131,7 @@ # -- Options for HTMLHelp output --------------------------------------------- # Output file base name for HTML help builder. -htmlhelp_basename = 'qcelementaldoc' +htmlhelp_basename = "qcelementaldoc" # -- Options for LaTeX output ------------------------------------------------ @@ -141,15 +140,12 @@ # The paper size ('letterpaper' or 'a4paper'). # # 'papersize': 'letterpaper', - # The font size ('10pt', '11pt' or '12pt'). # # 'pointsize': '10pt', - # Additional stuff for the LaTeX preamble. # # 'preamble': '', - # Latex figure (float) alignment # # 'figure_align': 'htbp', @@ -159,8 +155,7 @@ # (source start file, target name, title, # author, documentclass [howto, manual, or own class]). latex_documents = [ - (master_doc, 'QCElemental.tex', 'QCElemental Documentation', - author, 'manual'), + (master_doc, "QCElemental.tex", "QCElemental Documentation", author, "manual"), ] @@ -168,10 +163,7 @@ # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). 
-man_pages = [ - (master_doc, 'qcelemental', 'QCElemental Documentation', - [author], 1) -] +man_pages = [(master_doc, "qcelemental", "QCElemental Documentation", [author], 1)] # -- Options for Texinfo output ---------------------------------------------- @@ -180,30 +172,37 @@ # (source start file, target name, title, author, # dir menu entry, description, category) texinfo_documents = [ - (master_doc, 'QCElemental', 'QCElemental Documentation', - author, 'QCElemental', 'One line description of project.', - 'Miscellaneous'), + ( + master_doc, + "QCElemental", + "QCElemental Documentation", + author, + "QCElemental", + "One line description of project.", + "Miscellaneous", + ), ] # -- Extension configuration ------------------------------------------------- extlinks = { - 'issue': ('https://github.com/MolSSI/QCElemental/issues/%s', 'GH#'), - 'pr': ('https://github.com/MolSSI/QCElemental/pull/%s', 'GH#') + "issue": ("https://github.com/MolSSI/QCElemental/issues/%s", "GH#"), + "pr": ("https://github.com/MolSSI/QCElemental/pull/%s", "GH#"), } # -- Options for intersphinx extension --------------------------------------- # Example configuration for intersphinx: refer to the Python standard library. 
-intersphinx_mapping = {'python': ('https://docs.python.org/3.10', None), - "numpy": ("https://numpy.org/doc/stable/", None), - 'scipy': ('https://docs.scipy.org/doc/scipy/', None), - 'matplotlib': ('https://matplotlib.org/stable/', None), - "qcengine": ("http://docs.qcarchive.molssi.org/projects/QCEngine/en/latest/", None), - "qcfractal": ("http://docs.qcarchive.molssi.org/projects/QCFractal/en/latest/", None), - } +intersphinx_mapping = { + "python": ("https://docs.python.org/3.10", None), + "numpy": ("https://numpy.org/doc/stable/", None), + "scipy": ("https://docs.scipy.org/doc/scipy/", None), + "matplotlib": ("https://matplotlib.org/stable/", None), + "qcengine": ("http://docs.qcarchive.molssi.org/projects/QCEngine/en/latest/", None), + "qcfractal": ("http://docs.qcarchive.molssi.org/projects/QCFractal/en/latest/", None), +} # -- Options for todo extension ---------------------------------------------- diff --git a/docs/source/covalent_radii.rst b/docs/covalent_radii.rst similarity index 100% rename from docs/source/covalent_radii.rst rename to docs/covalent_radii.rst diff --git a/docs/source/index.rst b/docs/index.rst similarity index 100% rename from docs/source/index.rst rename to docs/index.rst diff --git a/docs/source/install.rst b/docs/install.rst similarity index 100% rename from docs/source/install.rst rename to docs/install.rst diff --git a/docs/source/model_common.rst b/docs/model_common.rst similarity index 100% rename from docs/source/model_common.rst rename to docs/model_common.rst diff --git a/docs/source/model_molecule.rst b/docs/model_molecule.rst similarity index 100% rename from docs/source/model_molecule.rst rename to docs/model_molecule.rst diff --git a/docs/source/model_result.rst b/docs/model_result.rst similarity index 100% rename from docs/source/model_result.rst rename to docs/model_result.rst diff --git a/docs/source/models.rst b/docs/models.rst similarity index 100% rename from docs/source/models.rst rename to docs/models.rst diff 
--git a/docs/source/periodic_table.rst b/docs/periodic_table.rst similarity index 100% rename from docs/source/periodic_table.rst rename to docs/periodic_table.rst diff --git a/docs/source/physconst.rst b/docs/physconst.rst similarity index 100% rename from docs/source/physconst.rst rename to docs/physconst.rst diff --git a/docs/requirements.yml b/docs/requirements.yml deleted file mode 100644 index dfc32ab6..00000000 --- a/docs/requirements.yml +++ /dev/null @@ -1,19 +0,0 @@ -name: qcelemental-docs -channels: - - defaults - - conda-forge -dependencies: - - python=3 - - numpy - - pydantic - - pint - - - sphinx - - sphinx_rtd_theme - - sphinx-automodapi - - sphinx-autodoc-typehints - - graphviz - - autodoc-pydantic - - #- pip: - # - git+https://github.com/MolSSI/qcarchive-sphinx-theme#egg=qcarchive_sphinx_theme diff --git a/docs/source/vanderwaals_radii.rst b/docs/vanderwaals_radii.rst similarity index 100% rename from docs/source/vanderwaals_radii.rst rename to docs/vanderwaals_radii.rst diff --git a/pyproject.toml b/pyproject.toml index 2f7cc3f4..752b8499 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,3 +1,85 @@ +[build-system] +requires = ["poetry-core"] +build-backend = "poetry.core.masonry.api" + + +[tool.poetry] +name = "qcelemental" +version = "0.25.1" +description = "Core data structures for Quantum Chemistry." 
+authors = ["The QCArchive Development Team "] +license = "BSD-3-Clause" +readme = "README.md" +homepage = "https://github.com/MolSSI/QCElemental" +documentation = "http://docs.qcarchive.molssi.org/projects/qcelemental/en/latest/" +classifiers = [ + "Development Status :: 4 - Beta", + "Intended Audience :: Science/Research", + "Programming Language :: Python :: 3 :: Only", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", +] + +[tool.poetry.dependencies] +numpy = [ + { version = ">=1.12.0", python = "3.8" }, + { version = ">=1.24.1", python = ">=3.9" }, +] +python = "^3.7" +pint = ">=0.10.0" +pydantic = ">=1.8.2" +nglview = { extras = ["viz"], version = "^3.0.3" } +ipykernel = { version = "<6.0.0", extras = ["viz"] } +importlib-metadata = { version = ">=4.8", python = "<3.8" } +networkx = { version = "<3.0", extras = ["align"] } +pytest = { extras = ["test"], version = "^7.2.2" } + + +[tool.poetry.group.dev.dependencies] +black = ">=23.1.0" +mypy = "^1.1.1" +isort = "5.11.5" +flake8 = "<6.0.0" +pre-commit = "<3.2.0" +pytest-cov = "^4.0.0" +autoflake = "^2.0.2" +jsonschema = "^4.17.3" +msgpack = "^1.0.5" +numpydoc = "^1.5.0" +docutils = "<0.19" +sphinx = "<6.0.0" +sphinxcontrib-napoleon = "^0.7" +sphinx-rtd-theme = "^1.2.0" +autodoc-pydantic = "^1.8.0" +sphinx-automodapi = "^0.15.0" +sphinx-autodoc-typehints = "^1.22" + [tool.black] line-length = 120 target-version = ['py37'] + + +[tool.isort] +force_grid_wrap = 0 +include_trailing_comma = true +line_length = 120 +multi_line_output = 3 +use_parentheses = true + +[tool.coverage.run] +branch = true +omit = ["*/tests/*", "*/migrations/*", "*site-packages*", "*__init__.py"] + +[tool.mypy] +plugins = "pydantic.mypy" +ignore_missing_imports = true + +[tool.pydantic-mypy] +init_forbid_extra = true +init_typed = true 
+warn_required_dynamic_aliases = true +warn_untyped_fields = true diff --git a/qcelemental/__init__.py b/qcelemental/__init__.py index 6cd89fda..a3c237ed 100644 --- a/qcelemental/__init__.py +++ b/qcelemental/__init__.py @@ -1,15 +1,15 @@ -""" -Main init for QCElemental -""" +# https://github.com/python-poetry/poetry/pull/2366#issuecomment-652418094 +try: + import importlib.metadata as importlib_metadata +except ModuleNotFoundError: + import importlib_metadata + +__version__ = importlib_metadata.version(__name__) # Handle singletons, not their classes or modules from . import covalent_radii, models, molparse, molutil, periodic_table, physical_constants, util, vanderwaals_radii from .datum import Datum from .exceptions import ChoicesError, DataUnavailableError, MoleculeFormatError, NotAnElementError, ValidationError - -# Handle versioneer -from .extras import get_information - from .testing import compare, compare_recursive, compare_values # Expose singletons from the modules @@ -26,7 +26,3 @@ del physical_constants del covalent_radii del vanderwaals_radii - -__version__ = get_information("version") -__git_revision__ = get_information("git_revision") -del get_information diff --git a/qcelemental/_version.py b/qcelemental/_version.py deleted file mode 100644 index bc3f4f19..00000000 --- a/qcelemental/_version.py +++ /dev/null @@ -1,533 +0,0 @@ -# This file helps to compute a version number in source trees obtained from -# git-archive tarball (such as those provided by githubs download-from-tag -# feature). Distribution tarballs (built by setup.py sdist) and build -# directories (produced by setup.py build) will contain a much shorter file -# that just contains the computed version number. - -# This file is released into the public domain. 
Generated by -# versioneer-0.18 (https://github.com/warner/python-versioneer) -"""Git implementation of _version.py.""" - -import errno -import os -import re -import subprocess -import sys - - -def get_keywords(): - """Get the keywords needed to look up the version information.""" - # these strings will be replaced by git during git-archive. - # setup.py/versioneer.py will grep for the variable names, so they must - # each be defined on a line of their own. _version.py will just call - # get_keywords(). - git_refnames = "$Format:%d$" - git_full = "$Format:%H$" - git_date = "$Format:%ci$" - keywords = {"refnames": git_refnames, "full": git_full, "date": git_date} - return keywords - - -class VersioneerConfig: - """Container for Versioneer configuration parameters.""" - - -def get_config(): - """Create, populate and return the VersioneerConfig() object.""" - # these strings are filled in when 'setup.py versioneer' creates - # _version.py - cfg = VersioneerConfig() - cfg.VCS = "git" - cfg.style = "pep440" - cfg.tag_prefix = "" - cfg.parentdir_prefix = "None" - cfg.versionfile_source = "qcelemental/_version.py" - cfg.verbose = False - return cfg - - -class NotThisMethod(Exception): - """Exception raised if a method is not valid for the current scenario.""" - - -LONG_VERSION_PY = {} -HANDLERS = {} - - -def register_vcs_handler(vcs, method): # decorator - """Decorator to mark a method as the handler for a particular VCS.""" - - def decorate(f): - """Store f in HANDLERS[vcs][method].""" - if vcs not in HANDLERS: - HANDLERS[vcs] = {} - HANDLERS[vcs][method] = f - return f - - return decorate - - -def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, env=None): - """Call the given command(s).""" - assert isinstance(commands, list) - p = None - for c in commands: - try: - dispcmd = str([c] + args) - # remember shell=False, so use git.cmd on windows, not just git - p = subprocess.Popen( - [c] + args, cwd=cwd, env=env, stdout=subprocess.PIPE, 
stderr=(subprocess.PIPE if hide_stderr else None) - ) - break - except EnvironmentError: - e = sys.exc_info()[1] - if e.errno == errno.ENOENT: - continue - if verbose: - print("unable to run %s" % dispcmd) - print(e) - return None, None - else: - if verbose: - print("unable to find command, tried %s" % (commands,)) - return None, None - stdout = p.communicate()[0].strip() - if sys.version_info[0] >= 3: - stdout = stdout.decode() - if p.returncode != 0: - if verbose: - print("unable to run %s (error)" % dispcmd) - print("stdout was %s" % stdout) - return None, p.returncode - return stdout, p.returncode - - -def versions_from_parentdir(parentdir_prefix, root, verbose): - """Try to determine the version from the parent directory name. - - Source tarballs conventionally unpack into a directory that includes both - the project name and a version string. We will also support searching up - two directory levels for an appropriately named parent directory - """ - rootdirs = [] - - for i in range(3): - dirname = os.path.basename(root) - if dirname.startswith(parentdir_prefix): - return { - "version": dirname[len(parentdir_prefix) :], - "full-revisionid": None, - "dirty": False, - "error": None, - "date": None, - } - else: - rootdirs.append(root) - root = os.path.dirname(root) # up a level - - if verbose: - print("Tried directories %s but none started with prefix %s" % (str(rootdirs), parentdir_prefix)) - raise NotThisMethod("rootdir doesn't start with parentdir_prefix") - - -@register_vcs_handler("git", "get_keywords") -def git_get_keywords(versionfile_abs): - """Extract version information from the given file.""" - # the code embedded in _version.py can just fetch the value of these - # keywords. When used from setup.py, we don't want to import _version.py, - # so we do it with a regexp instead. This function is not used from - # _version.py. 
- keywords = {} - try: - f = open(versionfile_abs, "r") - for line in f.readlines(): - if line.strip().startswith("git_refnames ="): - mo = re.search(r'=\s*"(.*)"', line) - if mo: - keywords["refnames"] = mo.group(1) - if line.strip().startswith("git_full ="): - mo = re.search(r'=\s*"(.*)"', line) - if mo: - keywords["full"] = mo.group(1) - if line.strip().startswith("git_date ="): - mo = re.search(r'=\s*"(.*)"', line) - if mo: - keywords["date"] = mo.group(1) - f.close() - except EnvironmentError: - pass - return keywords - - -@register_vcs_handler("git", "keywords") -def git_versions_from_keywords(keywords, tag_prefix, verbose): - """Get version information from git keywords.""" - if not keywords: - raise NotThisMethod("no keywords at all, weird") - date = keywords.get("date") - if date is not None: - # git-2.2.0 added "%cI", which expands to an ISO-8601 -compliant - # datestamp. However we prefer "%ci" (which expands to an "ISO-8601 - # -like" string, which we must then edit to make compliant), because - # it's been around since git-1.5.3, and it's too difficult to - # discover which version we're using, or to work around using an - # older one. - date = date.strip().replace(" ", "T", 1).replace(" ", "", 1) - refnames = keywords["refnames"].strip() - if refnames.startswith("$Format"): - if verbose: - print("keywords are unexpanded, not using") - raise NotThisMethod("unexpanded keywords, not a git-archive tarball") - refs = set([r.strip() for r in refnames.strip("()").split(",")]) - # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of - # just "foo-1.0". If we see a "tag: " prefix, prefer those. - TAG = "tag: " - tags = set([r[len(TAG) :] for r in refs if r.startswith(TAG)]) - if not tags: - # Either we're using git < 1.8.3, or there really are no tags. We use - # a heuristic: assume all version tags have a digit. 
The old git %d - # expansion behaves like git log --decorate=short and strips out the - # refs/heads/ and refs/tags/ prefixes that would let us distinguish - # between branches and tags. By ignoring refnames without digits, we - # filter out many common branch names like "release" and - # "stabilization", as well as "HEAD" and "master". - tags = set([r for r in refs if re.search(r"\d", r)]) - if verbose: - print("discarding '%s', no digits" % ",".join(refs - tags)) - if verbose: - print("likely tags: %s" % ",".join(sorted(tags))) - for ref in sorted(tags): - # sorting will prefer e.g. "2.0" over "2.0rc1" - if ref.startswith(tag_prefix): - r = ref[len(tag_prefix) :] - if verbose: - print("picking %s" % r) - return { - "version": r, - "full-revisionid": keywords["full"].strip(), - "dirty": False, - "error": None, - "date": date, - } - # no suitable tags, so version is "0+unknown", but full hex is still there - if verbose: - print("no suitable tags, using unknown + full revision id") - return { - "version": "0+unknown", - "full-revisionid": keywords["full"].strip(), - "dirty": False, - "error": "no suitable tags", - "date": None, - } - - -@register_vcs_handler("git", "pieces_from_vcs") -def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): - """Get version from 'git describe' in the root of the source tree. - - This only gets called if the git-archive 'subst' keywords were *not* - expanded, and _version.py hasn't already been rewritten with a short - version string, meaning we're inside a checked out source tree. 
- """ - GITS = ["git"] - if sys.platform == "win32": - GITS = ["git.cmd", "git.exe"] - - out, rc = run_command(GITS, ["rev-parse", "--git-dir"], cwd=root, hide_stderr=True) - if rc != 0: - if verbose: - print("Directory %s not under git control" % root) - raise NotThisMethod("'git rev-parse --git-dir' returned error") - - # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty] - # if there isn't one, this yields HEX[-dirty] (no NUM) - describe_out, rc = run_command( - GITS, ["describe", "--tags", "--dirty", "--always", "--long", "--match", "%s*" % tag_prefix], cwd=root - ) - # --long was added in git-1.5.5 - if describe_out is None: - raise NotThisMethod("'git describe' failed") - describe_out = describe_out.strip() - full_out, rc = run_command(GITS, ["rev-parse", "HEAD"], cwd=root) - if full_out is None: - raise NotThisMethod("'git rev-parse' failed") - full_out = full_out.strip() - - pieces = {} - pieces["long"] = full_out - pieces["short"] = full_out[:7] # maybe improved later - pieces["error"] = None - - # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty] - # TAG might have hyphens. - git_describe = describe_out - - # look for -dirty suffix - dirty = git_describe.endswith("-dirty") - pieces["dirty"] = dirty - if dirty: - git_describe = git_describe[: git_describe.rindex("-dirty")] - - # now we have TAG-NUM-gHEX or HEX - - if "-" in git_describe: - # TAG-NUM-gHEX - mo = re.search(r"^(.+)-(\d+)-g([0-9a-f]+)$", git_describe) - if not mo: - # unparseable. Maybe git-describe is misbehaving? 
- pieces["error"] = "unable to parse git-describe output: '%s'" % describe_out - return pieces - - # tag - full_tag = mo.group(1) - if not full_tag.startswith(tag_prefix): - if verbose: - fmt = "tag '%s' doesn't start with prefix '%s'" - print(fmt % (full_tag, tag_prefix)) - pieces["error"] = "tag '%s' doesn't start with prefix '%s'" % (full_tag, tag_prefix) - return pieces - pieces["closest-tag"] = full_tag[len(tag_prefix) :] - - # distance: number of commits since tag - pieces["distance"] = int(mo.group(2)) - - # commit: short hex revision ID - pieces["short"] = mo.group(3) - - else: - # HEX: no tags - pieces["closest-tag"] = None - count_out, rc = run_command(GITS, ["rev-list", "HEAD", "--count"], cwd=root) - pieces["distance"] = int(count_out) # total number of commits - - # commit date: see ISO-8601 comment in git_versions_from_keywords() - date = run_command(GITS, ["show", "-s", "--format=%ci", "HEAD"], cwd=root)[0].strip() - pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1) - - return pieces - - -def plus_or_dot(pieces): - """Return a + if we don't already have one, else return a .""" - if "+" in pieces.get("closest-tag", ""): - return "." - return "+" - - -def render_pep440(pieces): - """Build up version string, with post-release "local version identifier". - - Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you - get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty - - Exceptions: - 1: no tags. git_describe was just HEX. 
0+untagged.DISTANCE.gHEX[.dirty] - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"] or pieces["dirty"]: - rendered += plus_or_dot(pieces) - rendered += "%d.g%s" % (pieces["distance"], pieces["short"]) - if pieces["dirty"]: - rendered += ".dirty" - else: - # exception #1 - rendered = "0+untagged.%d.g%s" % (pieces["distance"], pieces["short"]) - if pieces["dirty"]: - rendered += ".dirty" - return rendered - - -def render_pep440_pre(pieces): - """TAG[.post.devDISTANCE] -- No -dirty. - - Exceptions: - 1: no tags. 0.post.devDISTANCE - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"]: - rendered += ".post.dev%d" % pieces["distance"] - else: - # exception #1 - rendered = "0.post.dev%d" % pieces["distance"] - return rendered - - -def render_pep440_post(pieces): - """TAG[.postDISTANCE[.dev0]+gHEX] . - - The ".dev0" means dirty. Note that .dev0 sorts backwards - (a dirty tree will appear "older" than the corresponding clean one), - but you shouldn't be releasing software with -dirty anyways. - - Exceptions: - 1: no tags. 0.postDISTANCE[.dev0] - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"] or pieces["dirty"]: - rendered += ".post%d" % pieces["distance"] - if pieces["dirty"]: - rendered += ".dev0" - rendered += plus_or_dot(pieces) - rendered += "g%s" % pieces["short"] - else: - # exception #1 - rendered = "0.post%d" % pieces["distance"] - if pieces["dirty"]: - rendered += ".dev0" - rendered += "+g%s" % pieces["short"] - return rendered - - -def render_pep440_old(pieces): - """TAG[.postDISTANCE[.dev0]] . - - The ".dev0" means dirty. - - Eexceptions: - 1: no tags. 
0.postDISTANCE[.dev0] - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"] or pieces["dirty"]: - rendered += ".post%d" % pieces["distance"] - if pieces["dirty"]: - rendered += ".dev0" - else: - # exception #1 - rendered = "0.post%d" % pieces["distance"] - if pieces["dirty"]: - rendered += ".dev0" - return rendered - - -def render_git_describe(pieces): - """TAG[-DISTANCE-gHEX][-dirty]. - - Like 'git describe --tags --dirty --always'. - - Exceptions: - 1: no tags. HEX[-dirty] (note: no 'g' prefix) - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"]: - rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) - else: - # exception #1 - rendered = pieces["short"] - if pieces["dirty"]: - rendered += "-dirty" - return rendered - - -def render_git_describe_long(pieces): - """TAG-DISTANCE-gHEX[-dirty]. - - Like 'git describe --tags --dirty --always -long'. - The distance/hash is unconditional. - - Exceptions: - 1: no tags. 
HEX[-dirty] (note: no 'g' prefix) - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) - else: - # exception #1 - rendered = pieces["short"] - if pieces["dirty"]: - rendered += "-dirty" - return rendered - - -def render(pieces, style): - """Render the given version pieces into the requested style.""" - if pieces["error"]: - return { - "version": "unknown", - "full-revisionid": pieces.get("long"), - "dirty": None, - "error": pieces["error"], - "date": None, - } - - if not style or style == "default": - style = "pep440" # the default - - if style == "pep440": - rendered = render_pep440(pieces) - elif style == "pep440-pre": - rendered = render_pep440_pre(pieces) - elif style == "pep440-post": - rendered = render_pep440_post(pieces) - elif style == "pep440-old": - rendered = render_pep440_old(pieces) - elif style == "git-describe": - rendered = render_git_describe(pieces) - elif style == "git-describe-long": - rendered = render_git_describe_long(pieces) - else: - raise ValueError("unknown style '%s'" % style) - - return { - "version": rendered, - "full-revisionid": pieces["long"], - "dirty": pieces["dirty"], - "error": None, - "date": pieces.get("date"), - } - - -def get_versions(): - """Get version information or return default if unable to do so.""" - # I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have - # __file__, we can work backwards from there to the root. Some - # py2exe/bbfreeze/non-CPython implementations don't do __file__, in which - # case we can only use expanded keywords. - - cfg = get_config() - verbose = cfg.verbose - - try: - return git_versions_from_keywords(get_keywords(), cfg.tag_prefix, verbose) - except NotThisMethod: - pass - - try: - root = os.path.realpath(__file__) - # versionfile_source is the relative path from the top of the source - # tree (where the .git directory might live) to this file. Invert - # this to find the root from __file__. 
- for i in cfg.versionfile_source.split("/"): - root = os.path.dirname(root) - except NameError: - return { - "version": "0+unknown", - "full-revisionid": None, - "dirty": None, - "error": "unable to find root of source tree", - "date": None, - } - - try: - pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose) - return render(pieces, cfg.style) - except NotThisMethod: - pass - - try: - if cfg.parentdir_prefix: - return versions_from_parentdir(cfg.parentdir_prefix, root, verbose) - except NotThisMethod: - pass - - return { - "version": "0+unknown", - "full-revisionid": None, - "dirty": None, - "error": "unable to compute version", - "date": None, - } diff --git a/qcelemental/conftest.py b/qcelemental/conftest.py index 6ed70d8a..e69de29b 100644 --- a/qcelemental/conftest.py +++ b/qcelemental/conftest.py @@ -1,40 +0,0 @@ -from pathlib import Path - -import pytest - - -def pytest_addoption(parser): - parser.addoption( - "--validate", action="store_true", help="validate JSON from previous test run against exported schema" - ) - - -@pytest.fixture(scope="session", autouse=True) -def set_up_overall(request): - # in all pytest runs except --validate (which uses the files), clear away the JSON examples and generate fresh - if not request.config.getoption("--validate", default=False): - _data_path = Path(__file__).parent.resolve() / "tests" / "qcschema_instances" - for fl in _data_path.rglob("*.json"): - fl.unlink() - - -def pytest_runtest_setup(item): - # there's a bug where can only set options if specify path in call, so needs to be ``pytest qcelemental/ --validate`` - - # skip the validate-generated-instances-against-exported-schema tests on most ``pytest`` runs. - # run only the validate-generated-instances-against-exported-schema tests on ``pytest --validate`` runs. 
- if not item.config.getoption("--validate", default=False) and item.name.startswith("test_qcschema"): - pytest.skip("can't run with --validate option") - elif item.config.getoption("--validate", default=False) and not item.name.startswith("test_qcschema"): - pytest.skip("need --validate option to run") - - -# Uncomment below to probe for tests needing `@using_web` - -# import socket -# -# class block_network(socket.socket): -# def __init__(self, *args, **kwargs): -# raise Exception("Network call blocked") -# -# socket.socket = block_network diff --git a/qcelemental/covalent_radii.py b/qcelemental/covalent_radii.py index 10110f1a..47f14be3 100644 --- a/qcelemental/covalent_radii.py +++ b/qcelemental/covalent_radii.py @@ -75,7 +75,7 @@ def __str__(self) -> str: def get( self, atom: Union[int, str], *, return_tuple: bool = False, units: str = "bohr", missing: float = None - ) -> Union[float, "Datum"]: # lgtm [py/similar-function] + ) -> Union[float, "Datum"]: r""" Access a covalent radius for species `atom`. @@ -145,7 +145,7 @@ def string_representation(self) -> str: return print_variables(self.cr) - def write_c_header(self, filename: str = "covrad.h", missing: float = 2.0) -> None: # lgtm[py/similar-function] + def write_c_header(self, filename: str = "covrad.h", missing: float = 2.0) -> None: r"""Write C header file defining covalent radii array. Parameters diff --git a/qcelemental/extras.py b/qcelemental/extras.py deleted file mode 100644 index 29e52a52..00000000 --- a/qcelemental/extras.py +++ /dev/null @@ -1,22 +0,0 @@ -""" -Misc information and runtime information. -""" - -from . import _version - -__all__ = ["get_information"] - -versions = _version.get_versions() - -__info = {"version": versions["version"], "git_revision": versions["full-revisionid"]} - - -def get_information(key: str): - """ - Obtains a variety of runtime information about QCElemental. 
- """ - key = key.lower() - if key not in __info: - raise KeyError(f"Information key '{key}' not understood.") - - return __info[key] diff --git a/qcelemental/info/cpu_info.py b/qcelemental/info/cpu_info.py index ce1c4a00..3dbad81f 100644 --- a/qcelemental/info/cpu_info.py +++ b/qcelemental/info/cpu_info.py @@ -101,7 +101,6 @@ def process_names(self, name): @lru_cache(maxsize=1024) def get(name: str, vendor=None, cutoff=0.9) -> ProcessorInfo: - name = context.process_names(name.split("@")[0]) if ("amd" in name) or (vendor == "amd"): diff --git a/qcelemental/info/dft_info.py b/qcelemental/info/dft_info.py index 89425015..41fa436d 100644 --- a/qcelemental/info/dft_info.py +++ b/qcelemental/info/dft_info.py @@ -62,7 +62,6 @@ def __str__(self) -> str: def get(name: str) -> DFTFunctionalInfo: - name = name.lower() for x in dftfunctionalinfo.suffixes: if name.endswith(x): diff --git a/qcelemental/models/__init__.py b/qcelemental/models/__init__.py index 447d5478..c17f2cdc 100644 --- a/qcelemental/models/__init__.py +++ b/qcelemental/models/__init__.py @@ -13,10 +13,12 @@ from .basis import BasisSet from .common_models import ComputeError, DriverEnum, FailedOperation, Provenance from .molecule import Molecule -from .procedures import OptimizationInput, OptimizationResult from .procedures import Optimization # scheduled for removal +from .procedures import OptimizationInput, OptimizationResult +from .results import Result # scheduled for removal +from .results import ResultInput # scheduled for removal +from .results import ResultProperties # scheduled for removal from .results import AtomicInput, AtomicResult, AtomicResultProperties -from .results import Result, ResultInput, ResultProperties # scheduled for removal def qcschema_models(): diff --git a/qcelemental/models/basis.py b/qcelemental/models/basis.py index 2f577b6d..e9b8dc6f 100644 --- a/qcelemental/models/basis.py +++ b/qcelemental/models/basis.py @@ -192,7 +192,6 @@ def _check_atom_map(cls, v, values): 
@validator("nbf", always=True) def _check_nbf(cls, v, values): - # Bad construction, pass on errors try: nbf = cls._calculate_nbf(values["atom_map"], values["center_data"]) diff --git a/qcelemental/models/molecule.py b/qcelemental/models/molecule.py index 5944001f..f014af75 100644 --- a/qcelemental/models/molecule.py +++ b/qcelemental/models/molecule.py @@ -350,6 +350,8 @@ def __init__(self, orient: bool = False, validate: Optional[bool] = None, **kwar kwargs = {**kwargs, **schema} # Allow any extra fields validate = True + if "extras" not in kwargs: + kwargs["extras"] = {} super().__init__(**kwargs) # We are pulling out the values *explicitly* so that the pydantic skip_defaults works as expected @@ -645,7 +647,6 @@ def get_fragment( atom_size = 0 if group_fragments: - # Loop through the real blocks frag_start = 0 for frag in real: @@ -1029,7 +1030,6 @@ def _orient_molecule_internal(self): geom_noise = 10 ** (-GEOMETRY_NOISE) for num in range(new_geometry.shape[0]): - for x in range(3): if phase_check[x]: continue diff --git a/qcelemental/models/procedures.py b/qcelemental/models/procedures.py index 631f537d..d1affe75 100644 --- a/qcelemental/models/procedures.py +++ b/qcelemental/models/procedures.py @@ -111,7 +111,6 @@ class OptimizationResult(OptimizationInput): @validator("trajectory", each_item=False) def _trajectory_protocol(cls, v, values): - # Do not propogate validation errors if "protocols" not in values: raise ValueError("Protocols was not properly formed.") diff --git a/qcelemental/models/results.py b/qcelemental/models/results.py index e36e2f7d..d5fb8d54 100644 --- a/qcelemental/models/results.py +++ b/qcelemental/models/results.py @@ -1,6 +1,6 @@ from enum import Enum from functools import partial -from typing import TYPE_CHECKING, Any, Dict, Optional, Set, Union +from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Set, Union import numpy as np from pydantic import Field, constr, validator @@ -16,6 +16,121 @@ from pydantic.typing 
import ReprArgs +class MolecularDescriptors(ProtoModel): + r""" + Molecular descriptors for cheminformatics and AI/ML applications following the MolSSI QCSchema. + """ + + canonical_smiles: Optional[str] = Field( + None, description="Canonical (non-kekulized, implicit hydrogens) SMILES string representing the molecule." + ) + inchi_key: Optional[str] = Field(None, description="Internet searchable InChI hash.") + inchi: Optional[str] = Field(None, description="InChI string for molecule.") + num_val_e: Optional[int] = Field(None, description="Valence electron count") + num_rad_e: Optional[int] = Field(None, description="Radical electron count.") + num_hetero: Optional[int] = Field(None, description="Number of heteroatoms (not C/H).") + num_no: Optional[int] = Field(None, description="Number of N/O atoms.") + num_nhoh: Optional[int] = Field(None, description="Number of NH/OH groups.") + num_h_acceptors: Optional[int] = Field(None, description="Number of hydrogen-bond acceptors.") + num_h_donors: Optional[int] = Field(None, description="Number of hydrogen-bond donors.") + num_rot_bonds: Optional[int] = Field(None, description="Number of (relatively) freely rotating bonds.") + num_spiro_atoms: Optional[int] = Field(None, description="Number of spiro atoms.") + num_bridgehead_atoms: Optional[int] = Field(None, description="Number of bridgehead atoms") + atomic_charges: Optional[Dict[str, List[float]]] = Field( + None, + description="Calculated atomic charges where 'gasteiger' is following method in (Gasteiger, J.; Marseli, M. Tetrahedron, 1980), and 'eem' is Electronegativity Equalization Method following (Mortier, W.J.; Van Genechten, K.; Gasteiger, J. JACS, 1985 and Mortier, W.J.; Ghosh, S.K.; Shankar, S. JACS, 1986).", + ) + stereochemistry: Optional[List[Union[Tuple[int, str], Tuple[int, int, str]]]] = Field( + None, description="CIP (R/S, E/Z) stereochemistry by atom indices and stereo type." 
+ ) + aromaticity: Optional[Dict[str, List[Union[int, Tuple[int, int]]]]] = Field( + None, description="Dictionary of bond indices and atom indices of aromatic bonds." + ) + ring_info: Optional[Dict[str, Union[int, Dict[int, Dict[str, Union[int, bool, List[int]]]]]]] = Field( + None, + description="Ring counts by type info for each ring where the key is the ring index and contains the size, whether it's aromatic(bool) and/or a heterocycle(bool), and atom indices.", + ) + logp: Optional[float] = Field( + None, + description="Octanol-water partition coefficient approximation from Wildman, S.A.; Crippen, G.M. J. Chem. Inf. Comput. Sci., 1999.", + ) + mol_refract: Optional[float] = Field( + None, + description="Molar refractivity in m^3/mol from Wildman, S.A.; Crippen, G.M. J. Chem. Inf. Comput. Sci., 1999.", + ) + surface_area: Optional[Dict[str, float]] = Field( + None, + description="Surface area approximations by different methods, containing Topological Polar Surface Area(TPSA) (Ertl P. et al, J. Med. Chem., 2000), the CCG approximations Labute ASA, PEOE VSA, SMR VSA, SlogP VSA and EState VSA (Labute, P. J. Mol. Graph. Model., 2000), an EState VSA variant developed by RDKit, and an average of the VSA methods.", + ) + plane_best_fit: Optional[float] = Field( + None, + description="Average distance in Angstrom from the plane of best fit of a molecule's atoms (Firth, N.C.; Brown, N.; Blagg, J. J. Chem. Inf. Model., 2012).", + ) + mo_inertia: Optional[Dict[str, List[float]]] = Field( + None, + description="Principal and normalized moments of inertia as described in (Sauer, W.H.B.; Schwarz, M.K. J. Chem. Inf. Comput. Sci., 2003).", + ) + rad_gyration: Optional[float] = Field( + None, + description="Radius of gyration from Arteca, G.A. 'Molecular Shape Descriptors' in Reviews in Computational Chemistry vol. 9.", + ) + inertial_shape_factor: Optional[float] = Field( + None, + description="From Todeschini, R.; Consonni, V. 
'Descriptors from Molecular Geometry' in Handbook of Chemoinformatics.", + ) + eccentricity: Optional[float] = Field( + None, + description="Elliptical eccentricities derived from principal inertia moments, from Arteca, G.A. 'Molecular Shape Descriptors' in Reviews in Computational Chemistry vol. 9.", + ) + asphericity: Optional[float] = Field( + None, + description="Molecular asphericity derived from principal moments of inertia, from Baumgartner, A. J. Chem. Phys. 1993.", + ) + spherocity_idx: Optional[float] = Field( + None, + description="A function of the eigenvalues of the covariance matrix of atomic coordinates, from Todeschini, R.; Consonni, V. 'Descriptors from Molecular Geometry' in Handbook of Chemoinformatics.", + ) + balaban_idx: Optional[float] = Field( + None, description="2D Topological index from Balaban, J. Chem. Phys. Lett. 1982." + ) + bertz_ct: Optional[float] = Field(None, description="2D topological index from Bertz, S.H. JACS, 1981.") + ipc_idx: Optional[float] = Field( + None, description="2D topological index from Bonchev, D.; Trinajstic, N. J. Chem. Phys. 1977." + ) + hall_kier_parameters: Optional[Dict[str, float]] = Field( + None, + description="A set of 2D topological indexes from Hall, L.H.; Kier, L.B. 'The Molecular Connectivity Chi Indexes and ...' in Reviews in Computational Chemistry Vol. 2, also including Phi index from Kier, L.B. Mol. Inform., 1989 (journal previously called Quant. Struct.-Act. Relat.)", + ) + bcut2D: Optional[Dict[str, float]] = Field( + None, + description="High and low eigenvalues of topologically weighted matrices where an atomic property (charge, polarizability, etc) fills diagonal as a non-fingerprint based measure of similarity, from Pearlman, R.S.; Smith,K.M. 'Novel Software Tools for Chemical Diversity' in 3D QSAR in Drug Design Vol. 
2.", + ) + autocorr2D: Optional[List[float]] = Field( + None, + description="2D topological based autocorrelation of atomic physicochemical properties from Todeschini, R.; Consonni, V. 'Descriptors from Molecular Geometry' in Handbook of Chemoinformatics.", + ) + autocorr3D: Optional[List[float]] = Field( + None, + description="3D geometry based autocorrelation of atomic physicochemical properties from Todeschini, R.; Consonni, V. 'Descriptors from Molecular Geometry' in Handbook of Chemoinformatics.", + ) + morse: Optional[List[float]] = Field( + None, + description="3D Molecule Representation of Structures based on Electron diffraction (MoRSE) from Todeschini, R.; Consonni, V. 'Descriptors from Molecular Geometry' in Handbook of Chemoinformatics.", + ) + rdf: Optional[List[float]] = Field( + None, + description="Radial Distance Function (RDF) descriptors from Todeschini, R.; Consonni, V. 'Descriptors from Molecular Geometry' in Handbook of Chemoinformatics.", + ) + whim: Optional[List[float]] = Field( + None, + description="Weighted Holistic Invariant Molecular (WHIM) descriptors, based on projections of atoms along the principal axis, from Todeschini, R.; Consonni, V. 'Descriptors from Molecular Geometry' in Handbook of Chemoinformatics.", + ) + getaway: Optional[List[float]] = Field( + None, + description="GEometry, Topology, and Atom-Weights AssemblY (GETAWAY) 3D descriptors, based on statistical analysis of Molecular Influence Matrix, from Todeschini, R.; Consonni, V. 'Descriptors from Molecular Geometry' in Handbook of Chemoinformatics.", + ) + + class AtomicResultProperties(ProtoModel): r""" Named properties of quantum chemistry computations following the MolSSI QCSchema. 
@@ -31,7 +146,9 @@ class AtomicResultProperties(ProtoModel): calcinfo_nmo: Optional[int] = Field(None, description="The number of molecular orbitals for the computation.") calcinfo_nalpha: Optional[int] = Field(None, description="The number of alpha electrons in the computation.") calcinfo_nbeta: Optional[int] = Field(None, description="The number of beta electrons in the computation.") - calcinfo_natom: Optional[int] = Field(None, description="The number of atoms in the computation.") + calcinfo_natom: Optional[int] = Field( + None, description="The number of atoms in the computation." + ) # perhaps this shouldn't be optional # Canonical nuclear_repulsion_energy: Optional[float] = Field(None, description="The nuclear repulsion energy.") @@ -50,6 +167,9 @@ class AtomicResultProperties(ProtoModel): units="E_h/a0^2", ) + # Cheminformatics (currently just rdkit) Keywords + descriptors: Optional[MolecularDescriptors] = Field(None, description=MolecularDescriptors.__doc__) + # SCF Keywords scf_one_electron_energy: Optional[float] = Field( None, @@ -433,7 +553,6 @@ class Config(ProtoModel.Config): @validator("scf_eigenvalues_a", "scf_eigenvalues_b", "scf_occupations_a", "scf_occupations_b") def _assert1d(cls, v, values): - try: v = v.reshape(-1) except (ValueError, AttributeError): @@ -491,7 +610,6 @@ def _assert2d(cls, v, values): "occupations_b", ) def _assert_exists(cls, v, values): - if values.get(v, None) is None: raise ValueError(f"Return quantity {v} does not exist in the values.") return v @@ -651,7 +769,6 @@ def _validate_return_result(cls, v, values): @validator("wavefunction", pre=True) def _wavefunction_protocol(cls, value, values): - # We are pre, gotta do extra checks if value is None: return value @@ -720,7 +837,6 @@ def _wavefunction_protocol(cls, value, values): @validator("stdout") def _stdout_protocol(cls, value, values): - # Do not propagate validation errors if "protocols" not in values: raise ValueError("Protocols was not properly formed.") @@ 
-735,7 +851,6 @@ def _stdout_protocol(cls, value, values): @validator("native_files") def _native_file_protocol(cls, value, values): - ancp = values["protocols"].native_files if ancp == "all": return value diff --git a/qcelemental/molparse/from_arrays.py b/qcelemental/molparse/from_arrays.py index 1b6f8092..747094a1 100644 --- a/qcelemental/molparse/from_arrays.py +++ b/qcelemental/molparse/from_arrays.py @@ -462,7 +462,7 @@ def validate_provenance(dicary): if connectivity is not None: conn = [] try: - for (at1, at2, bondorder) in connectivity: + for at1, at2, bondorder in connectivity: if not (float(at1)).is_integer() or at1 < 0: # or at1 >= nat: raise ValidationError("""Connectivity first atom should be int [0, nat): {}""".format(at1)) if not (float(at2)).is_integer() or at2 < 0: # or at2 >= nat: @@ -502,7 +502,6 @@ def validate_provenance(dicary): def validate_and_fill_frame(extern, fix_com=None, fix_orientation=None, fix_symmetry=None): - if fix_com is True: com = True elif fix_com is False: @@ -549,7 +548,6 @@ def validate_and_fill_frame(extern, fix_com=None, fix_orientation=None, fix_symm def validate_and_fill_efp(fragment_files=None, hint_types=None, geom_hints=None): - if ( fragment_files is None or hint_types is None @@ -559,7 +557,6 @@ def validate_and_fill_efp(fragment_files=None, hint_types=None, geom_hints=None) or geom_hints == [None] or not (len(fragment_files) == len(hint_types) == len(geom_hints)) ): - raise ValidationError( """Missing or inconsistent length among efp quantities: fragment_files ({}), hint_types ({}), and geom_hints ({})""".format( fragment_files, hint_types, geom_hints diff --git a/qcelemental/molparse/pubchem.py b/qcelemental/molparse/pubchem.py index b1ab05a9..ab104048 100644 --- a/qcelemental/molparse/pubchem.py +++ b/qcelemental/molparse/pubchem.py @@ -93,7 +93,6 @@ def get_cartesian(self): atom_count = 0 for line in lines: - atom_match = atom_re.match(line) if atom_match: x = float(atom_match.group(1)) diff --git 
a/qcelemental/molparse/to_string.py b/qcelemental/molparse/to_string.py index f95e7a4b..08afb6a0 100644 --- a/qcelemental/molparse/to_string.py +++ b/qcelemental/molparse/to_string.py @@ -142,7 +142,6 @@ def to_dict(self) -> Dict: smol.extend(atoms) elif dtype == "orca": - atom_format = "{elem}" ghost_format = "{elem}:" umap = {"bohr": "! Bohrs", "angstrom": "!"} @@ -181,7 +180,6 @@ def to_dict(self) -> Dict: } elif dtype == "molpro": - atom_format = "{elem}" ghost_format = "{elem}" umap = {"bohr": "bohr", "angstrom": "angstrom"} @@ -220,7 +218,6 @@ def to_dict(self) -> Dict: smol.append(f"set,spin={molrec['molecular_multiplicity']-1}") elif dtype == "nwchem": - atom_format = "{elem}{elbl}" ghost_format = "bq{elem}{elbl}" # TODO handle which units valid @@ -248,7 +245,6 @@ def to_dict(self) -> Dict: data.keywords["mcscf__multiplicity"] = molrec["molecular_multiplicity"] elif dtype == "madness": - atom_format = "{elem}" ghost_format = "GH" # TODO handle which units valid @@ -304,7 +300,6 @@ def to_dict(self) -> Dict: } elif dtype == "terachem": - atom_format = "{elem}" ghost_format = "X{elem}" umap = {"bohr": "au", "angstrom": ""} @@ -316,7 +311,6 @@ def to_dict(self) -> Dict: smol.extend(atoms) elif dtype == "psi4": - atom_format = "{elem}{elbl}" ghost_format = "Gh({elem}{elbl})" umap = {"bohr": "bohr", "angstrom": "angstrom"} @@ -392,7 +386,6 @@ def to_dict(self) -> Dict: smol.append(f" {(a1 + 1):2d} {(a2 + 1):2d} {int(b):1d} 0 0 0 0") elif dtype == "qchem": - atom_format = "{elem}" ghost_format = "@{elem}" umap = {"bohr": "True", "angstrom": "False"} diff --git a/qcelemental/molutil/__init__.py b/qcelemental/molutil/__init__.py index 9cc24016..507cf81d 100644 --- a/qcelemental/molutil/__init__.py +++ b/qcelemental/molutil/__init__.py @@ -1,3 +1,3 @@ from .align import B787, compute_scramble, kabsch_align from .connectivity import guess_connectivity -from .molecular_formula import order_molecular_formula, molecular_formula_from_symbols +from .molecular_formula 
import molecular_formula_from_symbols, order_molecular_formula diff --git a/qcelemental/molutil/test_molutil.py b/qcelemental/molutil/test_molutil.py index bf8cf012..1cc88fdc 100644 --- a/qcelemental/molutil/test_molutil.py +++ b/qcelemental/molutil/test_molutil.py @@ -572,7 +572,6 @@ def test_vector_gradient_align(): ], ) def test_guess_connectivity(args, kwargs, ans): - computed = qcel.molutil.guess_connectivity(*args, **kwargs) assert compare(computed, ans) diff --git a/qcelemental/periodic_table.py b/qcelemental/periodic_table.py index a111e470..1cad4054 100644 --- a/qcelemental/periodic_table.py +++ b/qcelemental/periodic_table.py @@ -40,7 +40,6 @@ class PeriodicTable: """ def __init__(self): - from . import data # Of length number of elements diff --git a/qcelemental/physical_constants/ureg.py b/qcelemental/physical_constants/ureg.py index ac67d063..a306af5e 100644 --- a/qcelemental/physical_constants/ureg.py +++ b/qcelemental/physical_constants/ureg.py @@ -96,7 +96,6 @@ def build_units_registry(context): _nist_units = set() for k, v in phys_const.items(): - # Automatically builds the following: # electron_volt_to_kelvin = 1.16045221e4 / electron_volt * kelvin # hartree_to_atomic_mass_unit = 2.9212623197e-8 / hartree * atomic_mass_unit @@ -157,7 +156,6 @@ def build_transformer(right_unit, default): """ def transformer(ureg, val): - left_unit = _find_nist_unit(val) if left_unit is None: return val * ureg.parse_expression(default) diff --git a/qcelemental/testing.py b/qcelemental/testing.py index a8e60db8..1b4a0239 100644 --- a/qcelemental/testing.py +++ b/qcelemental/testing.py @@ -128,7 +128,7 @@ def compare_values( f"""\t{label}: computed shape ({cptd.shape}) does not match ({xptd.shape}).""", return_message, quiet, - ) # lgtm: [py/syntax-error] + ) digits1 = abs(int(np.log10(atol))) + 2 digits_str = f"to atol={atol}" @@ -303,7 +303,6 @@ def compare( def _compare_recursive(expected, computed, atol, rtol, _prefix=False, equal_phase=False): - errors = [] name 
= _prefix or "root" prefix = name + "." diff --git a/qcelemental/util/test_gph_uno_bipartite.py b/qcelemental/tests/test_gph_uno_bipartite.py similarity index 99% rename from qcelemental/util/test_gph_uno_bipartite.py rename to qcelemental/tests/test_gph_uno_bipartite.py index 5b1ff027..eb977ae3 100644 --- a/qcelemental/util/test_gph_uno_bipartite.py +++ b/qcelemental/tests/test_gph_uno_bipartite.py @@ -6,12 +6,11 @@ from qcelemental.util.gph_uno_bipartite import _enumMaximumMatching, _enumMaximumMatching2, uno -from ..tests.addons import using_networkx, using_scipy +from .addons import using_networkx, using_scipy @using_networkx def test_example4(alg=1): - # fmt: off edges = [(0, 0), (0, 1), @@ -78,7 +77,6 @@ def test_example4(alg=1): @using_networkx def test_example3(alg=1): - # fmt: off match = [(1, 2), (3, 4), (5, 6), (7, 8)] edges = [(1, 2), @@ -113,7 +111,6 @@ def test_example3(alg=1): def _check(msg, ans, ref, verbose=1): - tans = [tuple(qw) for qw in ans] tref = [tuple(qw) for qw in ref] extra_answers = set(tans).difference(set(tref)) diff --git a/qcelemental/tests/test_importing.py b/qcelemental/tests/test_importing.py index a64a2b21..7bfcda97 100644 --- a/qcelemental/tests/test_importing.py +++ b/qcelemental/tests/test_importing.py @@ -147,13 +147,3 @@ def test_which_f_raisemsg(): qcel.util.which("evills", raise_error=True, raise_msg="Install `evills`.") assert str(e.value).endswith("Command 'evills' not found in envvar PATH. 
Install `evills`.") - - -def test_safe_version(): - assert "v" + qcel.util.safe_version(qcel.__version__) == qcel.__version__ - - -def test_parse_version(): - import pydantic - - assert qcel.util.parse_version(str(pydantic.VERSION)) >= qcel.util.parse_version("v0.20") diff --git a/qcelemental/tests/test_info.py b/qcelemental/tests/test_info.py index c3e701aa..31c0b91f 100644 --- a/qcelemental/tests/test_info.py +++ b/qcelemental/tests/test_info.py @@ -60,14 +60,12 @@ def test_cpu_info_index_lengths(): ], ) def test_cpu_info_search(name, model): - cpu = cpu_info.get(name) assert cpu is not None, name assert cpu.model == model, name def test_cpu_info_errors(): - with pytest.raises(KeyError) as exc: cpu_info.get("E7-8867 V4") diff --git a/qcelemental/tests/test_model_general.py b/qcelemental/tests/test_model_general.py index f9f3b658..538d6886 100644 --- a/qcelemental/tests/test_model_general.py +++ b/qcelemental/tests/test_model_general.py @@ -2,12 +2,9 @@ from qcelemental.models import ( AtomicInput, - AtomicResult, AtomicResultProperties, ComputeError, FailedOperation, - Molecule, - Optimization, OptimizationInput, ProtoModel, Provenance, @@ -17,7 +14,6 @@ def test_result_properties_default_skip(request): - obj = AtomicResultProperties(scf_one_electron_energy="-5.0") drop_qcsk(obj, request.node.name) @@ -35,7 +31,6 @@ def test_result_properties_default_repr(): def test_repr_provenance(request): - prov = Provenance(creator="qcel", version="v0.3.2") drop_qcsk(prov, request.node.name) @@ -59,7 +54,6 @@ def test_repr_failed_op(): def test_repr_result(request): - result = AtomicInput( **{"driver": "gradient", "model": {"method": "UFF"}, "molecule": {"symbols": ["He"], "geometry": [0, 0, 0]}} ) @@ -70,7 +64,6 @@ def test_repr_result(request): def test_repr_optimization(): - opt = OptimizationInput( **{ "input_specification": {"driver": "gradient", "model": {"method": "UFF"}}, diff --git a/qcelemental/tests/test_model_results.py b/qcelemental/tests/test_model_results.py 
index b77dfdd2..2f63036e 100644 --- a/qcelemental/tests/test_model_results.py +++ b/qcelemental/tests/test_model_results.py @@ -164,7 +164,6 @@ def native_data_fixture(result_data_fixture): @pytest.fixture(scope="function") def optimization_data_fixture(result_data_fixture): - trajectory = [] energies = [] for x in range(5): @@ -249,7 +248,6 @@ def test_basis_ecp_center_raises(): def test_basis_map_raises(): - with pytest.raises(ValueError) as e: assert basis.BasisSet(name="custom_basis", center_data=center_data, atom_map=["something_odd"]) @@ -274,7 +272,6 @@ def test_wavefunction_build(wavefunction_data_fixture, request): def test_wavefunction_matrix_size_error(wavefunction_data_fixture): - wavefunction_data_fixture["wavefunction"]["scf_orbitals_a"] = np.random.rand(2, 2) with pytest.raises(ValueError) as e: qcel.models.AtomicResult(**wavefunction_data_fixture) @@ -283,7 +280,6 @@ def test_wavefunction_matrix_size_error(wavefunction_data_fixture): def test_wavefunction_return_result_pointer(wavefunction_data_fixture): - del wavefunction_data_fixture["wavefunction"]["scf_orbitals_a"] with pytest.raises(ValueError) as e: qcel.models.AtomicResult(**wavefunction_data_fixture) @@ -314,7 +310,6 @@ def test_wavefunction_return_result_pointer(wavefunction_data_fixture): ], ) def test_wavefunction_protocols(protocol, restricted, provided, expected, wavefunction_data_fixture, request): - wfn_data = wavefunction_data_fixture["wavefunction"] if protocol is None: @@ -355,7 +350,6 @@ def test_wavefunction_protocols(protocol, restricted, provided, expected, wavefu ], ) def test_native_protocols(protocol, provided, expected, native_data_fixture, request): - native_data = native_data_fixture["native_files"] if protocol is None: @@ -382,7 +376,6 @@ def test_native_protocols(protocol, provided, expected, native_data_fixture, req [(None, [0, 1, 2, 3, 4]), ("all", [0, 1, 2, 3, 4]), ("initial_and_final", [0, 4]), ("final", [4]), ("none", [])], ) def 
test_optimization_trajectory_protocol(keep, indices, optimization_data_fixture): - if keep is not None: optimization_data_fixture["protocols"] = {"trajectory": keep} opt = qcel.models.OptimizationResult(**optimization_data_fixture) @@ -500,7 +493,6 @@ def test_result_derivatives_array(request): "smodel", ["molecule", "atomicresultproperties", "atomicinput", "atomicresult", "optimizationresult"] ) def test_model_dictable(result_data_fixture, optimization_data_fixture, smodel): - if smodel == "molecule": model = qcel.models.Molecule data = result_data_fixture["molecule"].dict() @@ -526,7 +518,6 @@ def test_model_dictable(result_data_fixture, optimization_data_fixture, smodel): def test_result_model_deprecations(result_data_fixture, optimization_data_fixture): - with pytest.warns(DeprecationWarning): qcel.models.ResultProperties(scf_one_electron_energy="-5.0") diff --git a/qcelemental/tests/test_molecule.py b/qcelemental/tests/test_molecule.py index 05b2180a..5d4c07d8 100644 --- a/qcelemental/tests/test_molecule.py +++ b/qcelemental/tests/test_molecule.py @@ -63,7 +63,9 @@ def test_molecule_data_constructor_dict(): assert water_psi == water_from_json assert water_psi == Molecule.from_data(water_psi.to_string("psi4"), dtype="psi4") - assert water_psi.get_hash() == "3c4b98f515d64d1adc1648fe1fe1d6789e978d34" # copied from schema_version=1 + assert ( + water_psi.get_hash() == "3c4b98f515d64d1adc1648fe1fe1d6789e978d34" # pragma: allowlist secret + ) # copied from schema_version=1 assert water_psi.schema_version == 2 assert water_psi.schema_name == "qcschema_molecule" @@ -90,16 +92,16 @@ def test_hash_canary(): """, dtype="psi4", ) - assert water_dimer_minima.get_hash() == "42f3ac52af52cf2105c252031334a2ad92aa911c" + assert water_dimer_minima.get_hash() == "42f3ac52af52cf2105c252031334a2ad92aa911c" # pragma: allowlist secret # Check orientation mol = water_dimer_minima.orient_molecule() - assert mol.get_hash() == "632490a0601500bfc677e9277275f82fbc45affe" + assert 
mol.get_hash() == "632490a0601500bfc677e9277275f82fbc45affe" # pragma: allowlist secret frag_0 = mol.get_fragment(0, orient=True) frag_1 = mol.get_fragment(1, orient=True) - assert frag_0.get_hash() == "d0b499739f763e8d3a5556b4ddaeded6a148e4d5" - assert frag_1.get_hash() == "bdc1f75bd1b7b999ff24783d7c1673452b91beb9" + assert frag_0.get_hash() == "d0b499739f763e8d3a5556b4ddaeded6a148e4d5" # pragma: allowlist secret + assert frag_1.get_hash() == "bdc1f75bd1b7b999ff24783d7c1673452b91beb9" # pragma: allowlist secret def test_molecule_np_constructors(): @@ -168,16 +170,15 @@ def test_water_minima_data(): [-3.27523824, 0.81341093, -1.43347255], ], ) - assert mol.get_hash() == "3c4b98f515d64d1adc1648fe1fe1d6789e978d34" + assert mol.get_hash() == "3c4b98f515d64d1adc1648fe1fe1d6789e978d34" # pragma: allowlist secret def test_water_minima_fragment(): - mol = water_dimer_minima.copy() frag_0 = mol.get_fragment(0, orient=True) frag_1 = mol.get_fragment(1, orient=True) - assert frag_0.get_hash() == "5f31757232a9a594c46073082534ca8a6806d367" - assert frag_1.get_hash() == "bdc1f75bd1b7b999ff24783d7c1673452b91beb9" + assert frag_0.get_hash() == "5f31757232a9a594c46073082534ca8a6806d367" # pragma: allowlist secret + assert frag_1.get_hash() == "bdc1f75bd1b7b999ff24783d7c1673452b91beb9" # pragma: allowlist secret frag_0_1 = mol.get_fragment(0, 1) frag_1_0 = mol.get_fragment(1, 0) @@ -193,13 +194,11 @@ def test_water_minima_fragment(): def test_pretty_print(): - mol = water_dimer_minima.copy() assert isinstance(mol.pretty_print(), str) def test_to_string(): - mol = water_dimer_minima.copy() assert isinstance(mol.to_string("psi4"), str) @@ -209,7 +208,6 @@ def test_to_string(): [("json", "json"), ("xyz", "xyz"), ("numpy", "npy"), pytest.param("msgpack", "msgpack", marks=using_msgpack)], ) def test_to_from_file_simple(tmp_path, dtype, filext): - benchmol = Molecule.from_data( """ O 0 0 0 @@ -228,7 +226,6 @@ def test_to_from_file_simple(tmp_path, dtype, filext): 
@pytest.mark.parametrize("dtype", ["json", "psi4"]) def test_to_from_file_complex(tmp_path, dtype): - p = tmp_path / ("water." + dtype) water_dimer_minima.to_file(p) @@ -240,7 +237,6 @@ def test_to_from_file_complex(tmp_path, dtype): "dtype, filext", [("json", "json"), ("xyz+", "xyz"), pytest.param("msgpack", "msgpack", marks=using_msgpack)] ) def test_to_from_file_charge_spin(tmp_path, dtype, filext): - benchmol = Molecule.from_data( """ 1 2 @@ -486,7 +482,7 @@ def test_get_fragment(group_fragments, orient): assert dimers[1].get_hash() == dimers[4].get_hash() assert dimers[2].get_hash() == dimers[5].get_hash() else: - assert 0 # lgtm: [py/unreachable-statement] + assert 0 ghdimers_nelectrons = [2, 2, 10, 10, 10, 10] ghdimers_nre = [0.0, 0.0, 9.163830150548483, 9.163830150548483, 9.163830150548483, 9.163830150548483] @@ -506,11 +502,10 @@ def test_get_fragment(group_fragments, orient): assert ghdimers[2].get_hash() != ghdimers[5].get_hash() # real pattern different assert not np.allclose(ghdimers[2].real, ghdimers[5].real) else: - assert 0 # lgtm: [py/unreachable-statement] + assert 0 def test_molecule_repeated_hashing(): - mol = Molecule( **{ "symbols": ["H", "O", "O", "H"], @@ -544,7 +539,6 @@ def test_molecule_repeated_hashing(): ], ) def test_measurements(measure, result): - Molecule( **{ "symbols": ["H", "O", "O", "H"], @@ -572,7 +566,6 @@ def test_measurements(measure, result): ], ) def test_fragment_charge_configurations(f1c, f1m, f2c, f2m, tc, tm): - mol = Molecule.from_data( """ {f1c} {f1m} @@ -604,7 +597,6 @@ def test_fragment_charge_configurations(f1c, f1m, f2c, f2m, tc, tm): def test_nuclearrepulsionenergy_nelectrons(): - mol = Molecule.from_data( """ 0 1 @@ -645,7 +637,6 @@ def test_nuclearrepulsionenergy_nelectrons(): @using_nglview def test_show(): - water_dimer_minima.show() @@ -683,7 +674,6 @@ def test_orient_nomasses(): ], ) def test_sparse_molecule_fields(mol_string, extra_keys): - expected_keys = { "schema_name", "schema_version", @@ -696,6 
+686,7 @@ def test_sparse_molecule_fields(mol_string, extra_keys): "fix_com", "fix_orientation", "provenance", + "extras", } mol = Molecule.from_data(mol_string) @@ -734,3 +725,11 @@ def test_nonphysical_spec(): assert compare_values([100.0], mol.masses, "nonphysical mass") print(mol.to_string(dtype="psi4")) + + +def test_extras(): + mol = qcel.models.Molecule(symbols=["He"], geometry=[0, 0, 0]) + assert mol.extras is not None + + mol = qcel.models.Molecule(symbols=["He"], geometry=[0, 0, 0], extras={"foo": "bar"}) + assert mol.extras["foo"] == "bar" diff --git a/qcelemental/tests/test_molparse_from_string.py b/qcelemental/tests/test_molparse_from_string.py index 9f5ed809..871cc0eb 100644 --- a/qcelemental/tests/test_molparse_from_string.py +++ b/qcelemental/tests/test_molparse_from_string.py @@ -174,7 +174,6 @@ def test_psi4_qm_1e(): def test_psi4_qm_1f(): - qcelemental.molparse.from_arrays( geom=np.array([0.0, 0.0, 0.0, 1.0, 0.0, 0.0]), elez=np.array([8, 1]), @@ -185,7 +184,6 @@ def test_psi4_qm_1f(): def test_psi4_qm_iutautoobig_error_1g(): - with pytest.raises(qcelemental.ValidationError) as e: qcelemental.molparse.from_arrays( geom=np.array([0.0, 0.0, 0.0, 1.0, 0.0, 0.0]), @@ -644,7 +642,6 @@ def test_psi4_qmefpformat_error_6c(): def test_qmefp_array_6d(): - fullans = copy.deepcopy(fullans6) fullans["qm"]["provenance"] = _arrays_prov_stamp fullans["efp"]["provenance"] = _arrays_prov_stamp @@ -666,7 +663,6 @@ def test_qmefp_array_6d(): def test_qmefp_badhint_error_6e(): - with pytest.raises(qcelemental.ValidationError) as e: qcelemental.molparse.from_input_arrays( units="Bohr", @@ -684,7 +680,6 @@ def test_qmefp_badhint_error_6e(): def test_qmefp_badefpgeom_error_6f(): - with pytest.raises(qcelemental.ValidationError) as e: qcelemental.molparse.from_input_arrays( units="Bohr", @@ -702,7 +697,6 @@ def test_qmefp_badefpgeom_error_6f(): def test_qmefp_badhintgeom_error_6g(): - with pytest.raises(qcelemental.ValidationError) as e: 
qcelemental.molparse.from_input_arrays( units="Bohr", @@ -720,7 +714,6 @@ def test_qmefp_badhintgeom_error_6g(): def test_qmefp_badfragfile_error_6h(): - with pytest.raises(qcelemental.ValidationError) as e: qcelemental.molparse.from_input_arrays( units="Bohr", @@ -738,7 +731,6 @@ def test_qmefp_badfragfile_error_6h(): def test_qmefp_hintlen_error_6i(): - with pytest.raises(qcelemental.ValidationError) as e: qcelemental.molparse.from_input_arrays( units="Bohr", @@ -756,7 +748,6 @@ def test_qmefp_hintlen_error_6i(): def test_qmefp_fixcom_error_6j(): - with pytest.raises(qcelemental.ValidationError) as e: qcelemental.molparse.from_input_arrays( units="Bohr", @@ -775,7 +766,6 @@ def test_qmefp_fixcom_error_6j(): def test_qmefp_fixori_error_6k(): - with pytest.raises(qcelemental.ValidationError) as e: qcelemental.molparse.from_input_arrays( units="Bohr", @@ -1535,7 +1525,6 @@ def test_steepzmat_error(): def test_zmatvar_error(): - with pytest.raises(qcelemental.ValidationError) as e: qcelemental.molparse.from_arrays( domain="qmvz", elem=["Rn", "Rn"], variables=[["bond", 2.0, "badextra"]], geom_unsettled=[[], ["1", "bond"]] @@ -1545,7 +1534,6 @@ def test_zmatvar_error(): def test_toomanyfrag_error(): - with pytest.raises(qcelemental.ValidationError) as e: qcelemental.molparse.from_arrays( domain="qmvz", @@ -1563,7 +1551,6 @@ def test_toomanyfrag_error(): def test_fragsep_error(): - with pytest.raises(qcelemental.ValidationError) as e: qcelemental.molparse.from_arrays( domain="qmvz", @@ -1650,7 +1637,6 @@ def test_natom_error(): def test_incompletefrag_error(): - with pytest.raises(qcelemental.ValidationError) as e: qcelemental.molparse.from_arrays( domain="qmvz", @@ -1667,7 +1653,6 @@ def test_incompletefrag_error(): def test_badmult_error(): - with pytest.raises(qcelemental.ValidationError) as e: qcelemental.molparse.from_arrays( domain="qmvz", @@ -1685,7 +1670,6 @@ def test_badmult_error(): def test_badchg_error(): - with pytest.raises(qcelemental.ValidationError) as 
e: qcelemental.molparse.from_arrays( domain="qmvz", @@ -1703,7 +1687,6 @@ def test_badchg_error(): def test_fraglen_error(): - with pytest.raises(qcelemental.ValidationError) as e: qcelemental.molparse.from_arrays( domain="qmvz", @@ -1926,7 +1909,6 @@ def test_connectivity_17b(): def test_connectivity_atindex_error(): - with pytest.raises(qcelemental.ValidationError) as e: qcelemental.molparse.from_arrays( geom=np.arange(9), @@ -1941,7 +1923,6 @@ def test_connectivity_atindex_error(): def test_connectivity_atrange_error(): - with pytest.raises(qcelemental.ValidationError) as e: qcelemental.molparse.from_arrays( geom=np.arange(9), @@ -1956,7 +1937,6 @@ def test_connectivity_atrange_error(): def test_connectivity_bondorder_error(): - with pytest.raises(qcelemental.ValidationError) as e: qcelemental.molparse.from_arrays( geom=np.arange(9), @@ -1971,7 +1951,6 @@ def test_connectivity_bondorder_error(): def test_connectivity_type_error(): - with pytest.raises(qcelemental.ValidationError) as e: qcelemental.molparse.from_arrays( geom=np.arange(9), diff --git a/qcelemental/tests/test_molparse_pubchem.py b/qcelemental/tests/test_molparse_pubchem.py index b371d989..d075e302 100644 --- a/qcelemental/tests/test_molparse_pubchem.py +++ b/qcelemental/tests/test_molparse_pubchem.py @@ -158,7 +158,10 @@ def test_pubchem_multiout_g(): qcelemental.molparse.from_string(subject, return_processed=True) except qcelemental.ChoicesError as e: assert e.choices[10789] == "2-hydroxycyclohepta-2,4,6-trien-1-one" - assert e.choices[193687] == "2-hydroxy-3-iodo-6-propan-2-ylcyclohepta-2,4,6-trien-1-one" + # NOTE: 193687 no longer returned by PubChem. Adding a new choice + # this is a brittle test due to changes on PubChem's end. 
+ # assert e.choices[193687] == "2-hydroxy-3-iodo-6-propan-2-ylcyclohepta-2,4,6-trien-1-one" + assert e.choices[85783535] == "(4-hydroxy-5-oxocyclohepta-1,3,6-trien-1-yl) dodecanoate" subject13 = """pubchem :ammonium\n""" diff --git a/qcelemental/tests/test_molparse_to_schema.py b/qcelemental/tests/test_molparse_to_schema.py index 72cdc39a..9835200c 100644 --- a/qcelemental/tests/test_molparse_to_schema.py +++ b/qcelemental/tests/test_molparse_to_schema.py @@ -104,7 +104,6 @@ def test_psi4_14c(): def test_dtype_error(): - final = qcelemental.molparse.from_string(subject14) with pytest.raises(qcelemental.ValidationError) as e: qcelemental.molparse.to_schema(final["qm"], dtype="xkcd927") @@ -114,7 +113,6 @@ def test_dtype_error(): @pytest.mark.parametrize("dtype", [1, 2]) def test_atomic_units_qcschema_ang_error(dtype): - final = qcelemental.molparse.from_string(subject14) with pytest.raises(qcelemental.ValidationError) as e: qcelemental.molparse.to_schema(final["qm"], dtype=dtype, units="Angstrom") @@ -123,7 +121,6 @@ def test_atomic_units_qcschema_ang_error(dtype): def test_psi4_nm_error(): - final = qcelemental.molparse.from_string(subject14) with pytest.raises(qcelemental.ValidationError) as e: qcelemental.molparse.to_schema(final["qm"], dtype="psi4", units="nm") @@ -326,7 +323,6 @@ def test_froto_2_16a(request): @pytest.mark.parametrize("dtype", [1, 2]) def test_tofro_16b(dtype, request): - fullans = copy.deepcopy(schema16_psi4) fullans["provenance"] = _schema_prov_stamp diff --git a/qcelemental/tests/test_molparse_to_string.py b/qcelemental/tests/test_molparse_to_string.py index c9974266..47fdcd8f 100644 --- a/qcelemental/tests/test_molparse_to_string.py +++ b/qcelemental/tests/test_molparse_to_string.py @@ -331,7 +331,6 @@ def test_to_string_xyz(inp, expected): ], ) def test_molecule_to_string(inp, kwargs, expected, request): - smol = _molecule_inputs[inp].to_string(**kwargs) drop_qcsk(_molecule_inputs[inp], request.node.name) assert 
compare(_molecule_outputs[expected], smol) diff --git a/qcelemental/tests/test_molutil.py b/qcelemental/tests/test_molutil.py new file mode 100644 index 00000000..7a2e66a4 --- /dev/null +++ b/qcelemental/tests/test_molutil.py @@ -0,0 +1,933 @@ +import math +import pprint + +import numpy as np +import pydantic +import pytest + +import qcelemental as qcel +from qcelemental.testing import compare, compare_molrecs, compare_recursive, compare_values + +from .addons import drop_qcsk, using_networkx + +pp = pprint.PrettyPrinter(width=120) + +ss22_12 = """ +C -1.2471894 -1.1718212 -0.6961388 +C -1.2471894 -1.1718212 0.6961388 +N -0.2589510 -1.7235771 1.4144796 +C 0.7315327 -2.2652221 0.6967288 +C 0.7315327 -2.2652221 -0.6967288 +N -0.2589510 -1.7235771 -1.4144796 +H -2.0634363 -0.7223199 -1.2472797 +H -2.0634363 -0.7223199 1.2472797 +H 1.5488004 -2.7128282 1.2475604 +H 1.5488004 -2.7128282 -1.2475604 +C -0.3380031 2.0800608 1.1300452 +C 0.8540254 1.3593471 1.1306308 +N 1.4701787 0.9907598 0.0000000 +C 0.8540254 1.3593471 -1.1306308 +C -0.3380031 2.0800608 -1.1300452 +N -0.9523059 2.4528836 0.0000000 +H -0.8103758 2.3643033 2.0618643 +H 1.3208583 1.0670610 2.0623986 +H 1.3208583 1.0670610 -2.0623986 +H -0.8103758 2.3643033 -2.0618643 +""" + + +@using_networkx +def test_scramble_descrambles_plain(): + s22_12 = qcel.models.Molecule.from_data(ss22_12) + + for trial in range(5): + s22_12.scramble(do_shift=True, do_rotate=True, do_resort=True, do_plot=False, verbose=0, do_test=True) + + +def test_relative_geoms_align_free(request): + s22_12 = qcel.models.Molecule.from_data(ss22_12) + drop_qcsk(s22_12, request.node.name) + + for trial in range(3): + cmol, _ = s22_12.scramble( + do_shift=True, do_rotate=True, do_resort=False, do_plot=False, verbose=2, do_test=True + ) + + rmolrec = qcel.molparse.from_schema(s22_12.dict()) + cmolrec = qcel.molparse.from_schema(cmol.dict()) + assert compare_molrecs(rmolrec, cmolrec, atol=1.0e-4, relative_geoms="align") + + +def 
test_relative_geoms_align_fixed(request): + s22_12 = qcel.models.Molecule.from_data(ss22_12 + "nocom\nnoreorient\n") + drop_qcsk(s22_12, request.node.name) + + for trial in range(3): + cmol, _ = s22_12.scramble( + do_shift=False, do_rotate=False, do_resort=False, do_plot=False, verbose=2, do_test=True + ) + + rmolrec = qcel.molparse.from_schema(s22_12.dict()) + cmolrec = qcel.molparse.from_schema(cmol.dict()) + assert compare_molrecs(rmolrec, cmolrec, atol=1.0e-4, relative_geoms="align") + + +chiral = qcel.models.Molecule.from_data( + """ + C 0.000000 0.000000 0.000000 +Br 0.000000 0.000000 1.949834 + F 1.261262 0.000000 -0.451181 +Cl -0.845465 1.497406 -0.341118 + H -0.524489 -0.897662 -0.376047 +""" +) + + +@using_networkx +def test_scramble_descrambles_chiral(): + chiral.scramble( + do_shift=True, do_rotate=True, do_resort=True, do_plot=False, verbose=0, do_mirror=False, do_test=True + ) + chiral.scramble( + do_shift=True, do_rotate=True, do_resort=False, do_plot=False, verbose=1, do_mirror=False, do_test=True + ) + for trial in range(5): + chiral.scramble( + do_shift=True, do_rotate=True, do_resort=True, do_plot=False, verbose=0, do_mirror=True, do_test=True + ) + + +soco10 = """ +O 1.0 0.0 0.0 +C 0.0 0.0 0.0 +O -1.0 0.0 0.0 +units ang +""" + +sooc12 = """ +O 1.2 4.0 0.0 +O -1.2 4.0 0.0 +C 0.0 4.0 0.0 +units ang +""" + +s18ooc12 = """ +18O 1.2 4.0 0.0 +O -1.2 4.0 0.0 +C 0.0 4.0 0.0 +units ang +""" + +sooco12 = """ +O 1.2 4.0 0.0 +O -1.2 4.0 0.0 +C 0.0 4.0 0.0 +O 3.0 4.0 0.0 +units ang +""" + +soco12 = """ +O 1.2 4.0 0.0 +C 0.0 4.0 0.0 +O -1.2 4.0 0.0 +units ang +""" + +ref_rmsd = math.sqrt(2.0 * 0.2 * 0.2 / 3.0) # RMSD always in Angstroms + + +@using_networkx +def test_error_bins_b787(): + oco10 = qcel.models.Molecule.from_data(soco10) + oco12 = qcel.models.Molecule.from_data(s18ooc12) + + with pytest.raises(qcel.ValidationError) as e: + oco12.align(oco10, verbose=0) + + assert "atom subclasses unequal" in str(e.value) + + +@using_networkx +def 
test_error_nat_b787(): + oco10 = qcel.models.Molecule.from_data(soco10) + oco12 = qcel.models.Molecule.from_data(sooco12) + + with pytest.raises(qcel.ValidationError) as e: + oco12.align(oco10, verbose=0) + + assert "natom doesn't match" in str(e.value) + + +def test_mill_shift_error(): + with pytest.raises(pydantic.ValidationError) as e: + qcel.models.AlignmentMill(shift=[0, 1]) + + assert "Shift must be castable to shape" in str(e.value) + + +def test_mill_rot_error(): + with pytest.raises(pydantic.ValidationError) as e: + qcel.models.AlignmentMill(rotation=[0, 1, 3]) + + assert "Rotation must be castable to shape" in str(e.value) + + +@using_networkx +def test_b787(): + oco10 = qcel.molparse.from_string(soco10) + oco12 = qcel.molparse.from_string(sooc12) + + oco10_geom_au = oco10["qm"]["geom"].reshape((-1, 3)) / qcel.constants.bohr2angstroms + oco12_geom_au = oco12["qm"]["geom"].reshape((-1, 3)) / qcel.constants.bohr2angstroms + + rmsd, mill = qcel.molutil.B787( + oco10_geom_au, + oco12_geom_au, + np.array(["O", "C", "O"]), + np.array(["O", "O", "C"]), + algorithm="permutative", + verbose=4, + do_plot=False, + ) + + assert compare_values(ref_rmsd, rmsd, "known rmsd B787", atol=1.0e-6) + + +@using_networkx +def test_b787_atomsmap(): + oco10 = qcel.molparse.from_string(soco10) + oco12 = qcel.molparse.from_string(soco12) + + oco10_geom_au = oco10["qm"]["geom"].reshape((-1, 3)) / qcel.constants.bohr2angstroms + oco12_geom_au = oco12["qm"]["geom"].reshape((-1, 3)) / qcel.constants.bohr2angstroms + + rmsd, mill = qcel.molutil.B787(oco10_geom_au, oco12_geom_au, None, None, atoms_map=True) + + assert compare_values(ref_rmsd, rmsd, "known rmsd B787", atol=1.0e-6) + + +@using_networkx +def test_model_b787(): + oco10 = qcel.models.Molecule.from_data(soco10) + oco12 = qcel.models.Molecule.from_data(sooc12) + + mol, data = oco12.align(oco10, verbose=4) + + assert compare_values(ref_rmsd, data["rmsd"], "known rmsd qcel.models.Molecule.align", atol=1.0e-6) + + +def 
test_error_kabsch(): + with pytest.raises(qcel.ValidationError) as e: + qcel.molutil.kabsch_align([1, 2, 3], [4, 5, 6], weight=7) + + assert "for kwarg 'weight'" in str(e.value) + + +@using_networkx +def test_kabsch_identity(): + oco10 = qcel.molparse.from_string(soco10) + oco12 = qcel.molparse.from_string(soco10) + + oco10_geom_au = oco10["qm"]["geom"].reshape((-1, 3)) / qcel.constants.bohr2angstroms + oco12_geom_au = oco12["qm"]["geom"].reshape((-1, 3)) / qcel.constants.bohr2angstroms + + rmsd, rot, shift = qcel.molutil.kabsch_align(oco10_geom_au, oco12_geom_au) + + assert compare_values(0.0, rmsd, "identical") + assert compare_values(np.identity(3), rot, "identity rotation matrix") + assert compare_values(np.zeros(3), shift, "identical COM") + + +trop_cs = qcel.models.Molecule.from_data( + """ + C -3.19247825 2.43488661 0.00000000 + C -4.39993972 0.13119097 0.00000000 + C -3.25125097 -2.33609553 0.00000000 + C -0.53296611 2.98441107 0.00000000 + C -0.74683325 -3.02798473 0.00000000 + C 1.48688415 1.34759833 0.00000000 + H -4.41324589 4.10714388 0.00000000 + H -6.46804026 0.15889833 0.00000000 + H -4.59260816 -3.91576186 0.00000000 + H -0.00999373 4.98699344 0.00000000 + H -0.30873683 -5.05056347 0.00000000 + C 1.53303555 -1.47231513 0.00000000 + O 3.67104984 -2.45774212 0.00000000 + O 3.84147141 2.33923482 0.00000000 + H 4.95785438 0.85953513 0.00000000 + units au +""" +) + +trop_gs_c2v = qcel.models.Molecule.from_data( + """ + C 2.38842439 0.00000000 -3.20779039 + C 0.00000000 0.00000000 -4.37431891 + C -2.38842439 0.00000000 -3.20779039 + C 3.04548001 0.00000000 -0.63779964 + C -3.04548001 0.00000000 -0.63779964 + C 1.40969252 0.00000000 1.46237865 + C -1.40969252 0.00000000 1.46237865 + O 2.17618825 0.00000000 3.78161558 + O -2.17618825 0.00000000 3.78161558 + H 0.00000000 0.00000000 4.59454571 + H 0.00000000 0.00000000 -6.44213321 + H 4.00103632 0.00000000 -4.50882987 + H -4.00103632 0.00000000 -4.50882987 + H 5.05910161 0.00000000 -0.16572021 + H 
-5.05910161 0.00000000 -0.16572021 + units au +""" +) + + +@using_networkx +def test_tropolone_b787(): + mol, data = trop_cs.align(trop_gs_c2v, do_plot=False, verbose=0, uno_cutoff=0.5) + assert compare_values(0.1413, data["rmsd"], "cs<-->c2v tropolones align", atol=1.0e-2) + + +def test_scramble_identity(): + mill = qcel.molutil.compute_scramble( + 4, do_resort=False, do_shift=False, do_rotate=False, deflection=1.0, do_mirror=False + ) + + mill_str = """---------------------------------------- + AlignmentMill + eye +---------------------------------------- +Mirror: False +Atom Map: [0 1 2 3] +Shift: [0. 0. 0.] +Rotation: +[[1. 0. 0.] + [0. 1. 0.] + [0. 0. 1.]] +----------------------------------------""" + + assert compare(mill_str, mill.pretty_print(label="eye")) + + mill_dict = { + "shift": [0.0, 0.0, 0.0], + "rotation": [[1.0, 0.0, 0.0], [0.0, 1.0, 0.0], [0.0, 0.0, 1.0]], + "atommap": [0, 1, 2, 3], + "mirror": False, + } + + assert compare_recursive(mill_dict, mill.dict()) + mill_dict["rotation"] = [1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0] + assert compare_recursive(mill_dict, mill.dict(encoding="json")) + + +def test_scramble_specific(): + mill = qcel.molutil.compute_scramble( + 4, + do_resort=[1, 2, 0, 3], + do_shift=[-1.82564537, 2.25391838, -2.56591963], + do_rotate=[ + [0.39078817, -0.9101616, -0.13744259], + [0.36750838, 0.29117465, -0.88326379], + [0.84393258, 0.29465774, 0.44827962], + ], + ) + + mill_str = """---------------------------------------- + AlignmentMill +---------------------------------------- +Mirror: False +Atom Map: [1 2 0 3] +Shift: [-1.82564537 2.25391838 -2.56591963] +Rotation: +[[ 0.39078817 -0.9101616 -0.13744259] + [ 0.36750838 0.29117465 -0.88326379] + [ 0.84393258 0.29465774 0.44827962]] +----------------------------------------""" + + assert compare(mill_str, mill.pretty_print()) + + +def test_hessian_align(request): + # from Psi4 test test_hessian_vs_cfour[HOOH_TS-H_analytic] + + rmill = """ 
+---------------------------------------- + AlignmentMill +---------------------------------------- +Mirror: False +Atom Map: [0 1 2 3] +Shift: [0.00000000e+00 0.00000000e+00 1.32217718e-10] +Rotation: +[[ 9.99999870e-01 -5.08999836e-04 -0.00000000e+00] + [ 5.08999836e-04 9.99999870e-01 0.00000000e+00] + [ 0.00000000e+00 -0.00000000e+00 1.00000000e+00]] +---------------------------------------- +""" + + p4_hooh_xyz = """ + units au + H 1.8327917647 -1.5752960165 -0.0000055594 + O 1.3171390326 0.1388012713 0.0000003503 + O -1.3171390326 -0.1388012713 0.0000003503 + H -1.8327917647 1.5752960165 -0.0000055594 + """ + + c4_hooh_xyz = """ + H 1.83199008 -1.57622903 -0.00000556 + O 1.31720978 0.13813086 0.00000035 + O -1.31720978 -0.13813086 0.00000035 + H -1.83199008 1.57622903 -0.00000556 + units au""" + + c4_hooh_hess = np.array( + [ + [ + 1.26389745e-01, + -1.48044116e-01, + -5.10600000e-07, + -8.25705803e-02, + 8.94682153e-02, + 3.59200000e-07, + -2.97329883e-02, + 6.20787276e-02, + 1.15100000e-07, + -1.40861766e-02, + -3.50282710e-03, + 3.62000000e-08, + ], + [ + -1.48044116e-01, + 5.70582596e-01, + 1.97410000e-06, + 1.70543201e-01, + -5.85505592e-01, + -2.01940000e-06, + -1.89962579e-02, + 1.41465336e-02, + 4.31000000e-08, + -3.50282710e-03, + 7.76462400e-04, + 2.30000000e-09, + ], + [ + -5.10600000e-07, + 1.97410000e-06, + -1.39249540e-03, + 5.48500000e-07, + -2.04760000e-06, + 1.39251280e-03, + -1.70000000e-09, + 7.57000000e-08, + 1.39261030e-03, + -3.62000000e-08, + -2.30000000e-09, + -1.39262780e-03, + ], + [ + -8.25705803e-02, + 1.70543201e-01, + 5.48500000e-07, + 4.71087328e-01, + -1.32453772e-01, + -3.97100000e-07, + -3.58783760e-01, + -1.90931709e-02, + -1.53000000e-07, + -2.97329883e-02, + -1.89962579e-02, + 1.70000000e-09, + ], + [ + 8.94682153e-02, + -5.85505592e-01, + -2.04760000e-06, + -1.32453772e-01, + 6.80349634e-01, + 2.09300000e-06, + -1.90931709e-02, + -1.08990576e-01, + 3.04000000e-08, + 6.20787276e-02, + 1.41465336e-02, + -7.57000000e-08, + ], 
+ [ + 3.59200000e-07, + -2.01940000e-06, + 1.39251280e-03, + -3.97100000e-07, + 2.09300000e-06, + -1.39226390e-03, + 1.53000000e-07, + -3.04000000e-08, + -1.39285910e-03, + -1.15100000e-07, + -4.31000000e-08, + 1.39261030e-03, + ], + [ + -2.97329883e-02, + -1.89962579e-02, + -1.70000000e-09, + -3.58783760e-01, + -1.90931709e-02, + 1.53000000e-07, + 4.71087328e-01, + -1.32453772e-01, + 3.97100000e-07, + -8.25705803e-02, + 1.70543201e-01, + -5.48500000e-07, + ], + [ + 6.20787276e-02, + 1.41465336e-02, + 7.57000000e-08, + -1.90931709e-02, + -1.08990576e-01, + -3.04000000e-08, + -1.32453772e-01, + 6.80349634e-01, + -2.09300000e-06, + 8.94682153e-02, + -5.85505592e-01, + 2.04760000e-06, + ], + [ + 1.15100000e-07, + 4.31000000e-08, + 1.39261030e-03, + -1.53000000e-07, + 3.04000000e-08, + -1.39285910e-03, + 3.97100000e-07, + -2.09300000e-06, + -1.39226390e-03, + -3.59200000e-07, + 2.01940000e-06, + 1.39251280e-03, + ], + [ + -1.40861766e-02, + -3.50282710e-03, + -3.62000000e-08, + -2.97329883e-02, + 6.20787276e-02, + -1.15100000e-07, + -8.25705803e-02, + 8.94682153e-02, + -3.59200000e-07, + 1.26389745e-01, + -1.48044116e-01, + 5.10600000e-07, + ], + [ + -3.50282710e-03, + 7.76462400e-04, + -2.30000000e-09, + -1.89962579e-02, + 1.41465336e-02, + -4.31000000e-08, + 1.70543201e-01, + -5.85505592e-01, + 2.01940000e-06, + -1.48044116e-01, + 5.70582596e-01, + -1.97410000e-06, + ], + [ + 3.62000000e-08, + 2.30000000e-09, + -1.39262780e-03, + 1.70000000e-09, + -7.57000000e-08, + 1.39261030e-03, + -5.48500000e-07, + 2.04760000e-06, + 1.39251280e-03, + 5.10600000e-07, + -1.97410000e-06, + -1.39249540e-03, + ], + ] + ) + + # generated from native psi4 geometry before alignment to cfour geometry + p4_hooh_hess_native = np.array( + [ + [ + 1.26540599e-01, + -1.48270387e-01, + -5.11580572e-07, + -8.27030303e-02, + 8.97243491e-02, + 3.60275814e-07, + -2.97549532e-02, + 6.20564277e-02, + 1.15085371e-07, + -1.40826158e-02, + -3.51038930e-03, + 3.62193872e-08, + ], + [ + -1.48270387e-01, + 
5.70432494e-01, + 1.97383511e-06, + 1.70799401e-01, + -5.85373844e-01, + -2.01926658e-06, + -1.90186246e-02, + 1.41684555e-02, + 4.31889743e-08, + -3.51038930e-03, + 7.72894141e-04, + 2.24249453e-09, + ], + [ + -5.11580572e-07, + 1.97383511e-06, + -1.39261744e-03, + 5.49492724e-07, + -2.04733779e-06, + 1.39262576e-03, + -1.69276446e-09, + 7.57451701e-08, + 1.39262075e-03, + -3.62193872e-08, + -2.24249453e-09, + -1.39262906e-03, + ], + [ + -8.27030303e-02, + 1.70799401e-01, + 5.49492724e-07, + 4.71222858e-01, + -1.32560453e-01, + -3.98187966e-07, + -3.58764874e-01, + -1.92203240e-02, + -1.52997522e-07, + -2.97549532e-02, + -1.90186246e-02, + 1.69276446e-09, + ], + [ + 8.97243491e-02, + -5.85373844e-01, + -2.04733779e-06, + -1.32560453e-01, + 6.80215453e-01, + 2.09276925e-06, + -1.92203240e-02, + -1.09010064e-01, + 3.03137013e-08, + 6.20564277e-02, + 1.41684555e-02, + -7.57451701e-08, + ], + [ + 3.60275814e-07, + -2.01926658e-06, + 1.39262576e-03, + -3.98187966e-07, + 2.09276925e-06, + -1.39245013e-03, + 1.52997522e-07, + -3.03137013e-08, + -1.39279639e-03, + -1.15085371e-07, + -4.31889743e-08, + 1.39262075e-03, + ], + [ + -2.97549532e-02, + -1.90186246e-02, + -1.69276446e-09, + -3.58764874e-01, + -1.92203240e-02, + 1.52997522e-07, + 4.71222858e-01, + -1.32560453e-01, + 3.98187966e-07, + -8.27030303e-02, + 1.70799401e-01, + -5.49492724e-07, + ], + [ + 6.20564277e-02, + 1.41684555e-02, + 7.57451701e-08, + -1.92203240e-02, + -1.09010064e-01, + -3.03137013e-08, + -1.32560453e-01, + 6.80215453e-01, + -2.09276925e-06, + 8.97243491e-02, + -5.85373844e-01, + 2.04733779e-06, + ], + [ + 1.15085371e-07, + 4.31889743e-08, + 1.39262075e-03, + -1.52997522e-07, + 3.03137013e-08, + -1.39279639e-03, + 3.98187966e-07, + -2.09276925e-06, + -1.39245013e-03, + -3.60275814e-07, + 2.01926658e-06, + 1.39262576e-03, + ], + [ + -1.40826158e-02, + -3.51038930e-03, + -3.62193872e-08, + -2.97549532e-02, + 6.20564277e-02, + -1.15085371e-07, + -8.27030303e-02, + 8.97243491e-02, + -3.60275814e-07, 
+ 1.26540599e-01, + -1.48270387e-01, + 5.11580572e-07, + ], + [ + -3.51038930e-03, + 7.72894141e-04, + -2.24249453e-09, + -1.90186246e-02, + 1.41684555e-02, + -4.31889743e-08, + 1.70799401e-01, + -5.85373844e-01, + 2.01926658e-06, + -1.48270387e-01, + 5.70432494e-01, + -1.97383511e-06, + ], + [ + 3.62193872e-08, + 2.24249453e-09, + -1.39262906e-03, + 1.69276446e-09, + -7.57451701e-08, + 1.39262075e-03, + -5.49492724e-07, + 2.04733779e-06, + 1.39262576e-03, + 5.11580572e-07, + -1.97383511e-06, + -1.39261744e-03, + ], + ] + ) + + p4mol = qcel.models.Molecule.from_data(p4_hooh_xyz) + c4mol = qcel.models.Molecule.from_data(c4_hooh_xyz) + drop_qcsk(c4mol, request.node.name) + aqmol, data = p4mol.align(c4mol, atoms_map=True, mols_align=True, verbose=4) + mill = data["mill"] + + assert compare([0, 1, 2, 3], mill.atommap) + assert compare_values( + [ + [9.99999870e-01, -5.08999836e-04, -0.00000000e00], + [5.08999836e-04, 9.99999870e-01, 0.00000000e00], + [0.00000000e00, -0.00000000e00, 1.00000000e00], + ], + mill.rotation, + atol=1.0e-6, + ) + + p2cgeom = mill.align_coordinates(p4mol.geometry) + assert compare_values(c4mol.geometry, p2cgeom, atol=1.0e-6) + + p2chess = mill.align_hessian(p4_hooh_hess_native) + assert compare_values(c4_hooh_hess, p2chess, atol=1.0e-4) + + +@using_networkx +def test_vector_gradient_align(): + # HOOH TS (optimized to be very nearly planar) + p4_hooh_xyz = """ + units au + H 1.8327917647 -1.5752960165 -0.0000055594 + O 1.3171390326 0.1388012713 0.0000003503 + O -1.3171390326 -0.1388012713 0.0000003503 + H -1.8327917647 1.5752960165 -0.0000055594 + """ + + # from C4 GRD file, produced by p4_hooh_xyz in ZMAT + c4_hooh_xyz = """ + units au + H 1.8319897007 -1.5762287045 -0.0000055594 + H -1.8319897007 1.5762287045 -0.0000055594 + O 1.3172095119 0.1381308288 0.0000003503 + O -1.3172095119 -0.1381308288 0.0000003503 + """ + + # From C4 DIPDER file, analytic + c4_hooh_dipder_x = np.array( + [ + 0.2780463810, + -0.0627423838, + -0.0000001663, + 
0.2780463810, + -0.0627423838, + 0.0000001663, + -0.2780463810, + 0.0627423838, + 0.0000007872, + -0.2780463810, + 0.0627423838, + -0.0000007872, + ] + ) + c4_hooh_dipder_y = np.array( + [ + -0.0452364698, + 0.2701572972, + -0.0000004246, + -0.0452364698, + 0.2701572972, + 0.0000004246, + 0.0452364698, + -0.2701572972, + 0.0000007936, + 0.0452364698, + -0.2701572972, + -0.0000007936, + ] + ) + c4_hooh_dipder_z = np.array( + [ + -0.0000001575, + -0.0000004725, + 0.4019549601, + 0.0000001575, + 0.0000004725, + 0.4019549601, + -0.0000000523, + 0.0000008401, + -0.4019549601, + 0.0000000523, + -0.0000008401, + -0.4019549601, + ] + ) + + # Generated from fixing orientation/com in psi4. Then using + # a 5-point finite differences of nuclear gradients computed + # with an applied electric field to produce a file17.dat. + p4_hooh_dipder_x = np.array( + [ + 0.2781013514, + -0.0627383175, + -0.0000001660, + -0.2781013514, + 0.0627383175, + 0.0000007867, + -0.2781013514, + 0.0627383175, + -0.0000007867, + 0.2781013514, + -0.0627383175, + 0.0000001660, + ] + ) + p4_hooh_dipder_y = np.array( + [ + -0.0452324587, + 0.2701024305, + -0.0000004247, + 0.0452324587, + -0.2701024305, + 0.0000007939, + 0.0452324587, + -0.2701024305, + -0.0000007939, + -0.0452324587, + 0.2701024305, + 0.0000004247, + ] + ) + p4_hooh_dipder_z = np.array( + [ + -0.0000001572, + -0.0000004726, + 0.4019549470, + -0.0000000527, + 0.0000008401, + -0.4019549470, + 0.0000000527, + -0.0000008401, + -0.4019549470, + 0.0000001572, + 0.0000004726, + 0.4019549470, + ] + ) + p4_hooh_dipder = np.concatenate((p4_hooh_dipder_x, p4_hooh_dipder_y, p4_hooh_dipder_z)).reshape(3, -1) + + p4mol = qcel.models.Molecule.from_data(p4_hooh_xyz) + c4mol = qcel.models.Molecule.from_data(c4_hooh_xyz) + aqmol, data = p4mol.align(c4mol, atoms_map=False, mols_align=True, verbose=4) + mill = data["mill"] + + assert compare([0, 3, 1, 2], mill.atommap) + + p2cgeom = mill.align_coordinates(p4mol.geometry) + assert 
compare_values(c4mol.geometry, p2cgeom, atol=1.0e-6) + + p2c_dipder_x, p2c_dipder_y, p2c_dipder_z = mill.align_vector_gradient(p4_hooh_dipder) + + assert compare_values(c4_hooh_dipder_x, p2c_dipder_x, atol=1.0e-5) + assert compare_values(c4_hooh_dipder_y, p2c_dipder_y, atol=1.0e-5) + assert compare_values(c4_hooh_dipder_z, p2c_dipder_z, atol=1.0e-5) + + +@pytest.mark.parametrize( + "args, kwargs, ans", + [ + ((["C", "C"], [0, 0, 0, 0, 0, 3]), {}, [(0, 1)]), + ((["C", "C"], [0, 0, 0, 0, 0, 3]), {"threshold": 4}, [(0, 1)]), + ((["C", "C"], [0, 0, 0, 0, 0, 10]), {}, []), + ((["C", "C"], [0, 0, 0, 0, 0, 2]), {"default_connectivity": 3}, [(0, 1, 3)]), + ((["C", "C", "C"], [0, 0, 0, 0, 0, 3, 0, 0, -3]), {}, [(0, 1), (0, 2)]), + ((["C", "Unknown"], [0, 0, 0, 0, 0, 3]), {}, [(0, 1)]), + ], +) +def test_guess_connectivity(args, kwargs, ans): + computed = qcel.molutil.guess_connectivity(*args, **kwargs) + assert compare(computed, ans) + + +@pytest.mark.parametrize( + "input,order,expected", + [ + ("NH3", "alphabetical", "H3N"), + ("NH3", "hill", "H3N"), + ("CH4", "alphabetical", "CH4"), + ("CH4", "hill", "CH4"), + ("IBr", "alphabetical", "BrI"), + ("IBr", "hill", "BrI"), + ("CCl4", "alphabetical", "CCl4"), + ("CCl4", "hill", "CCl4"), + ("CBr4", "alphabetical", "Br4C"), + ("CBr4", "hill", "CBr4"), + ("CBrH3", "alphabetical", "BrCH3"), + ("CBrH3", "hill", "CH3Br"), + ], +) +def test_order_molecular_formula(input, order, expected): + assert qcel.molutil.order_molecular_formula(input, order=order) == expected + + +def test_bad_formula_order(): + with pytest.raises(ValueError): + qcel.molutil.order_molecular_formula("CH4", order="disorder") + with pytest.raises(ValueError): + qcel.molutil.order_molecular_formula("ch4") diff --git a/qcelemental/util/test_scipy_hungarian.py b/qcelemental/tests/test_scipy_hungarian.py similarity index 99% rename from qcelemental/util/test_scipy_hungarian.py rename to qcelemental/tests/test_scipy_hungarian.py index c88263c5..41a0a05d 100644 --- 
a/qcelemental/util/test_scipy_hungarian.py +++ b/qcelemental/tests/test_scipy_hungarian.py @@ -15,7 +15,6 @@ def test_linear_sum_assignment(): - # fmt: off data = [ # Square @@ -66,7 +65,6 @@ def test_linear_sum_assignment(): # fmt: on for cost_matrix, expected_cost, expected_reduced_cost_matrix in data: - cost_matrix = np.array(cost_matrix) (row_ind, col_ind), reduced_cost_matrix = linear_sum_assignment(cost_matrix, return_cost=True) assert_array_equal(row_ind, np.sort(row_ind)) diff --git a/qcelemental/tests/test_zqcschema.py b/qcelemental/tests/test_zqcschema.py index 6d314b8d..d84324d6 100644 --- a/qcelemental/tests/test_zqcschema.py +++ b/qcelemental/tests/test_zqcschema.py @@ -25,12 +25,3 @@ def test_qcschema(fl, qcschema_models): res = jsonschema.validate(instance, qcschema_models[model]) assert res is None - - -# import pprint -# print("\n\n<<< SCHEMA") -# pprint.pprint(schemas["BasisSet"]) -# print("\n\n<<< INSTANCE") -# pprint.pprint(instance) - -# assert 0 diff --git a/qcelemental/util/__init__.py b/qcelemental/util/__init__.py index 04ae84cd..4cbdcec7 100644 --- a/qcelemental/util/__init__.py +++ b/qcelemental/util/__init__.py @@ -3,8 +3,6 @@ from .importing import parse_version, safe_version, which, which_import from .internal import provenance_stamp from .itertools import unique_everseen - -# from .mpl import plot_coord from .misc import ( compute_angle, compute_dihedral, @@ -25,9 +23,9 @@ json_loads, jsonext_dumps, jsonext_loads, - msgpackext_dumps, - msgpackext_loads, msgpack_dumps, msgpack_loads, + msgpackext_dumps, + msgpackext_loads, serialize, ) diff --git a/qcelemental/util/autodocs.py b/qcelemental/util/autodocs.py index afa35932..7d6a6a20 100644 --- a/qcelemental/util/autodocs.py +++ b/qcelemental/util/autodocs.py @@ -151,7 +151,7 @@ def doc_formatter(base_docs: str, target_object: BaseModel, allow_failure: bool second_line = "\n" + indent(prop_desc, " ") if prop_desc is not None else "" # Finally, write the detailed doc string new_doc += 
first_line + second_line + "\n" - except: # lgtm [py/catch-base-exception] + except: if allow_failure: new_doc = base_docs else: @@ -178,7 +178,6 @@ def __init__(self, target: BaseModel, allow_failure: bool = True, always_apply: if not always_apply: if isinstance(target, BaseModel) or (isinstance(target, type) and issubclass(target, BaseModel)): - if ( hasattr(target, self.ALREADY_AUTODOCED_ATTR) and getattr(target, self.ALREADY_AUTODOCED_ATTR) is True @@ -205,7 +204,7 @@ def __del__(self): self.target.__doc__ = self.base_doc if hasattr(self.target, self.ALREADY_AUTODOCED_ATTR): setattr(self.target, self.ALREADY_AUTODOCED_ATTR, False) - except: # lgtm [py/catch-base-exception] + except: # Corner case where trying to reapply and failing cannot delete the new self mid __init__ since # base_doc has not been set. pass diff --git a/qcelemental/util/gph_uno_bipartite.py b/qcelemental/util/gph_uno_bipartite.py index 497fb434..7301b291 100644 --- a/qcelemental/util/gph_uno_bipartite.py +++ b/qcelemental/util/gph_uno_bipartite.py @@ -153,7 +153,6 @@ def _enumMaximumMatchingIter(g, match, all_matches, add_e=None): cycles = list(nx.simple_cycles(d)) if len(cycles) == 0: - # ---------If no cycle, find a feasible path--------- all_uncovered = set(g.nodes).difference(set([ii[0] for ii in match])) all_uncovered = all_uncovered.difference(set([ii[1] for ii in match])) @@ -167,7 +166,6 @@ def _enumMaximumMatchingIter(g, match, all_matches, add_e=None): idx = 0 uncovered = all_uncovered[idx] while True: - if uncovered not in nx.isolates(g): paths = nx.single_source_shortest_path(d, uncovered, cutoff=2) len2paths = [vv for kk, vv in paths.items() if len(vv) == 3] diff --git a/qcelemental/util/importing.py b/qcelemental/util/importing.py index 6e3fd8f1..9bc4f347 100644 --- a/qcelemental/util/importing.py +++ b/qcelemental/util/importing.py @@ -103,7 +103,8 @@ def safe_version(*args, **kwargs) -> str: """ import pkg_resources - return pkg_resources.safe_version(*args, **kwargs) + 
version = pkg_resources.safe_version(*args, **kwargs) + return version def parse_version(*args, **kwargs): diff --git a/qcelemental/util/internal.py b/qcelemental/util/internal.py index 8043d569..41fd7b89 100644 --- a/qcelemental/util/internal.py +++ b/qcelemental/util/internal.py @@ -1,6 +1,6 @@ from typing import Dict -from qcelemental.extras import get_information +from qcelemental import __version__ def provenance_stamp(routine: str) -> Dict[str, str]: @@ -10,4 +10,4 @@ def provenance_stamp(routine: str) -> Dict[str, str]: generating routine's name is passed in through `routine`. """ - return {"creator": "QCElemental", "version": get_information("version"), "routine": routine} + return {"creator": "QCElemental", "version": __version__, "routine": routine} diff --git a/qcelemental/util/serialization.py b/qcelemental/util/serialization.py index a8f719e1..528de816 100644 --- a/qcelemental/util/serialization.py +++ b/qcelemental/util/serialization.py @@ -138,7 +138,6 @@ def default(self, obj: Any) -> Any: def jsonext_decode(obj: Any) -> Any: - if "_nd_" in obj: arr = np.frombuffer(bytes.fromhex(obj["data"]), dtype=obj["dtype"]) if "shape" in obj: diff --git a/qcelemental/vanderwaals_radii.py b/qcelemental/vanderwaals_radii.py index 21aa7541..72dc7b1d 100644 --- a/qcelemental/vanderwaals_radii.py +++ b/qcelemental/vanderwaals_radii.py @@ -61,7 +61,7 @@ def __str__(self) -> str: def get( self, atom: Union[int, str], *, return_tuple: bool = False, units: str = "bohr", missing: float = None - ) -> Union[float, "Datum"]: # lgtm [py/similar-function] + ) -> Union[float, "Datum"]: r""" Access a van der Waals radius for species ``atom``. 
@@ -129,7 +129,7 @@ def string_representation(self) -> str: return print_variables(self.vdwr) - def write_c_header(self, filename: str = "vdwrad.h", missing: float = 2.0) -> None: # lgtm [py/similar-function] + def write_c_header(self, filename: str = "vdwrad.h", missing: float = 2.0) -> None: r"""Write C header file defining Van der Waals radii array. Parameters diff --git a/raw_data/cpu_data/build_cpu_data.py b/raw_data/cpu_data/build_cpu_data.py index 8d211231..7c3d241b 100644 --- a/raw_data/cpu_data/build_cpu_data.py +++ b/raw_data/cpu_data/build_cpu_data.py @@ -1,7 +1,7 @@ -import pandas as pd -import black import json +import black +import pandas as pd with open("intel_cpu_database.json", "r") as handle: intel_raw = json.loads(handle.read()) @@ -36,7 +36,6 @@ def parse_amd_clock(name): def parse_amd_launch(d): - if d is None: return None @@ -105,7 +104,7 @@ def parse_amd_launch(d): # Some processors are classified as "Mobile", which are almost certainly in laptops not phones # Excluding atom procs instead, since they're the ones the break the parser - #if row["Essentials"]["Vertical Segment"] == "Mobile": + # if row["Essentials"]["Vertical Segment"] == "Mobile": # continue if "Intel Atom " in row["name"]: continue @@ -130,15 +129,12 @@ def parse_amd_launch(d): raise - def parse_intel_clock(name): - if name is None: return None name = name.lower() - if "mhz" in name: repl = "mhz" coef = 1e6 @@ -155,7 +151,6 @@ def parse_intel_clock(name): def parse_instructions(inst): - if inst is None: return None @@ -167,7 +162,6 @@ def parse_instructions(inst): def parse_date(d): - if d is None: return None @@ -198,19 +192,22 @@ def parse_date(d): # add extra data import extra_cpus + for i in extra_cpus.extra_cpus: df = df.append(i, ignore_index=True) + def name(vendor, family, model, clock_speed): if vendor.lower() in family.lower(): vendor = "" if family in str(model): family = "" - if family.endswith("Processors") and family[:-len("Processors")] in str(model): + if 
family.endswith("Processors") and family[: -len("Processors")] in str(model): family = "" return f"{vendor} {family} {model} @ {clock_speed/1_000_000_000:.1f} GHz" + df["name"] = df.apply(lambda row: name(row["vendor"], row["family"], row["model"], row["base_clock"]), axis=1) for (vendor, model), fix in extra_cpus.fixes.items(): @@ -220,14 +217,14 @@ def name(vendor, family, model, clock_speed): # Print some data for posterity print(df[df["vendor"] == "intel"].tail()) print(df[df["vendor"] == "amd"].tail()) -print('---') +print("---") # Handle nthreads == ncore bugs mask = (df["nthreads"] == "") | df["nthreads"].isnull() df.loc[mask, "nthreads"] = df.loc[mask, "ncores"] mask = (df["nthreads"] != "") & df["nthreads"].notnull() -#print(df[~mask]) +# print(df[~mask]) cnt = df.shape[0] df = df[mask] print(f"Dropped {cnt - df.shape[0]} / {cnt} processors without ncores") @@ -243,7 +240,6 @@ def name(vendor, family, model, clock_speed): df.drop_duplicates(subset=["vendor", "model"], keep="last", inplace=True) - output = f''' """ Processor data from multiple sources and vendors. 
@@ -253,10 +249,18 @@ def name(vendor, family, model, clock_speed): ''' + def to_python_str(data): - return json.dumps(data, indent=2).replace("true", "True").replace("false", "False").replace("NaN", "None").replace("null", "None") + return ( + json.dumps(data, indent=2) + .replace("true", "True") + .replace("false", "False") + .replace("NaN", "None") + .replace("null", "None") + ) + -output += f"data_rows = {to_python_str([tuple(x[1].values) for x in df.iterrows()])}\n" +output += f"data_rows = {to_python_str([tuple(x[1].values) for x in df.iterrows()])}\n" output += f"data_columns = {to_python_str(list(df.columns))}\n" output += "data_blob = [{k: v for k, v in zip(data_columns, row)} for row in data_rows]\n" diff --git a/raw_data/cpu_data/extra_cpus.py b/raw_data/cpu_data/extra_cpus.py index b5e8a2cd..9584798e 100644 --- a/raw_data/cpu_data/extra_cpus.py +++ b/raw_data/cpu_data/extra_cpus.py @@ -64,5 +64,5 @@ ("amd", 6276): {"launch_date": 2011}, ("amd", "AMD EPYC\u2122 7401P"): {"launch_date": 2017}, ("amd", "AMD EPYC\u2122 7601"): {"launch_date": 2017}, - ("amd", 6274): {"launch_date": 2011} -} \ No newline at end of file + ("amd", 6274): {"launch_date": 2011}, +} diff --git a/raw_data/dft_data/build_dft_info.py b/raw_data/dft_data/build_dft_info.py index ba910af5..6ae5895c 100644 --- a/raw_data/dft_data/build_dft_info.py +++ b/raw_data/dft_data/build_dft_info.py @@ -1,6 +1,7 @@ -import psi4 import json + import black +import psi4 dft_info = {"version": psi4.__version__, "functionals": {}} diff --git a/raw_data/nist_data/build_periodic_table.py b/raw_data/nist_data/build_periodic_table.py index 7971d7c6..2a3e40a9 100644 --- a/raw_data/nist_data/build_periodic_table.py +++ b/raw_data/nist_data/build_periodic_table.py @@ -2,181 +2,181 @@ This file will generate a JSON blob usable by QCElemental for physical constants """ -import json import datetime -import requests -from yapf.yapflib.yapf_api import FormatCode +import json import re -from decimal import Decimal 
+ +import requests +from yapf.yapflib.yapf_api import FormatCode # noqa # from https://www.nist.gov/pml/periodic-table-elements on 30 Aug 2018 # NIST SP 966 (July 2018) element_names = [ - 'Hydrogen', - 'Helium', - 'Lithium', - 'Beryllium', - 'Boron', - 'Carbon', - 'Nitrogen', - 'Oxygen', - 'Fluorine', - 'Neon', - 'Sodium', - 'Magnesium', - 'Aluminum', - 'Silicon', - 'Phosphorus', - 'Sulfur', - 'Chlorine', - 'Argon', - 'Potassium', - 'Calcium', - 'Scandium', - 'Titanium', - 'Vanadium', - 'Chromium', - 'Manganese', - 'Iron', - 'Cobalt', - 'Nickel', - 'Copper', - 'Zinc', - 'Gallium', - 'Germanium', - 'Arsenic', - 'Selenium', - 'Bromine', - 'Krypton', - 'Rubidium', - 'Strontium', - 'Yttrium', - 'Zirconium', - 'Niobium', - 'Molybdenum', - 'Technetium', - 'Ruthenium', - 'Rhodium', - 'Palladium', - 'Silver', - 'Cadmium', - 'Indium', - 'Tin', - 'Antimony', - 'Tellurium', - 'Iodine', - 'Xenon', - 'Cesium', - 'Barium', - 'Lanthanum', - 'Cerium', - 'Praseodymium', - 'Neodymium', - 'Promethium', - 'Samarium', - 'Europium', - 'Gadolinium', - 'Terbium', - 'Dysprosium', - 'Holmium', - 'Erbium', - 'Thulium', - 'Ytterbium', - 'Lutetium', - 'Hafnium', - 'Tantalum', - 'Tungsten', - 'Rhenium', - 'Osmium', - 'Iridium', - 'Platinum', - 'Gold', - 'Mercury', - 'Thallium', - 'Lead', - 'Bismuth', - 'Polonium', - 'Astatine', - 'Radon', - 'Francium', - 'Radium', - 'Actinium', - 'Thorium', - 'Protactinium', - 'Uranium', - 'Neptunium', - 'Plutonium', - 'Americium', - 'Curium', - 'Berkelium', - 'Californium', - 'Einsteinium', - 'Fermium', - 'Mendelevium', - 'Nobelium', - 'Lawrencium', - 'Rutherfordium', - 'Dubnium', - 'Seaborgium', - 'Bohrium', - 'Hassium', - 'Meitnerium', - 'Darmstadtium', - 'Roentgenium', - 'Copernicium', - 'Nihonium', - 'Flerovium', - 'Moscovium', - 'Livermorium', - 'Tennessine', - 'Oganesson', + "Hydrogen", + "Helium", + "Lithium", + "Beryllium", + "Boron", + "Carbon", + "Nitrogen", + "Oxygen", + "Fluorine", + "Neon", + "Sodium", + "Magnesium", + "Aluminum", + "Silicon", + 
"Phosphorus", + "Sulfur", + "Chlorine", + "Argon", + "Potassium", + "Calcium", + "Scandium", + "Titanium", + "Vanadium", + "Chromium", + "Manganese", + "Iron", + "Cobalt", + "Nickel", + "Copper", + "Zinc", + "Gallium", + "Germanium", + "Arsenic", + "Selenium", + "Bromine", + "Krypton", + "Rubidium", + "Strontium", + "Yttrium", + "Zirconium", + "Niobium", + "Molybdenum", + "Technetium", + "Ruthenium", + "Rhodium", + "Palladium", + "Silver", + "Cadmium", + "Indium", + "Tin", + "Antimony", + "Tellurium", + "Iodine", + "Xenon", + "Cesium", + "Barium", + "Lanthanum", + "Cerium", + "Praseodymium", + "Neodymium", + "Promethium", + "Samarium", + "Europium", + "Gadolinium", + "Terbium", + "Dysprosium", + "Holmium", + "Erbium", + "Thulium", + "Ytterbium", + "Lutetium", + "Hafnium", + "Tantalum", + "Tungsten", + "Rhenium", + "Osmium", + "Iridium", + "Platinum", + "Gold", + "Mercury", + "Thallium", + "Lead", + "Bismuth", + "Polonium", + "Astatine", + "Radon", + "Francium", + "Radium", + "Actinium", + "Thorium", + "Protactinium", + "Uranium", + "Neptunium", + "Plutonium", + "Americium", + "Curium", + "Berkelium", + "Californium", + "Einsteinium", + "Fermium", + "Mendelevium", + "Nobelium", + "Lawrencium", + "Rutherfordium", + "Dubnium", + "Seaborgium", + "Bohrium", + "Hassium", + "Meitnerium", + "Darmstadtium", + "Roentgenium", + "Copernicium", + "Nihonium", + "Flerovium", + "Moscovium", + "Livermorium", + "Tennessine", + "Oganesson", ] # from https://www.nist.gov/pml/periodic-table-elements on 30 Aug 2018 # NIST SP 966 (July 2018) longest_lived_isotope_for_unstable_elements = { - 'Tc': 98, - 'Pm': 145, - 'Po': 209, - 'At': 210, - 'Rn': 222, - 'Fr': 223, - 'Ra': 226, - 'Ac': 227, - 'Np': 237, - 'Pu': 244, - 'Am': 243, - 'Cm': 247, - 'Bk': 247, - 'Cf': 251, - 'Es': 252, - 'Fm': 257, - 'Md': 258, - 'No': 259, - 'Lr': 266, - 'Rf': 267, - 'Db': 268, - 'Sg': 271, - 'Bh': 270, - 'Hs': 269, - 'Mt': 278, - 'Ds': 281, - 'Rg': 282, - 'Cn': 285, - 'Nh': 286, - 'Fl': 289, - 'Mc': 289, - 
'Lv': 293, - 'Ts': 294, - 'Og': 294, + "Tc": 98, + "Pm": 145, + "Po": 209, + "At": 210, + "Rn": 222, + "Fr": 223, + "Ra": 226, + "Ac": 227, + "Np": 237, + "Pu": 244, + "Am": 243, + "Cm": 247, + "Bk": 247, + "Cf": 251, + "Es": 252, + "Fm": 257, + "Md": 258, + "No": 259, + "Lr": 266, + "Rf": 267, + "Db": 268, + "Sg": 271, + "Bh": 270, + "Hs": 269, + "Mt": 278, + "Ds": 281, + "Rg": 282, + "Cn": 285, + "Nh": 286, + "Fl": 289, + "Mc": 289, + "Lv": 293, + "Ts": 294, + "Og": 294, } data_url = "https://nist.gov/srd/srd_data//srd144_Atomic_Weights_and_Isotopic_Compositions_for_All_Elements.json" title = "Atomic Weights and Isotopic Compositions with Relative Atomic Masses - SRD144" date_modified = "2011-01-14" -year = date_modified.split('-')[0] +year = date_modified.split("-")[0] doi = "10.1351/PAC-REP-10-06-02" url = data_url access_date = str(datetime.datetime.utcnow()) @@ -196,92 +196,97 @@ """ -'''.format(year, title, date_modified, doi, url, access_date) +'''.format( + year, title, date_modified, doi, url, access_date +) atomic_weights_json = {"title": title, "date": date_modified, "doi": doi, "url": url, "access_data": access_date} # length number of elements Z = [0] # , 1, 2, ... -E = ['X'] # , H, He, ... -name = ['Dummy'] # , Hydrogen, Helium, ... +E = ["X"] # , H, He, ... +name = ["Dummy"] # , Hydrogen, Helium, ... # length number of elements plus number of isotopes -_EE = ['X', 'X'] # , H, H, H, ..., He, He, He, ... -EA = ['X', 'X0'] # , H, H1, H2, ..., He, He3, He4, ... +_EE = ["X", "X"] # , H, H, H, ..., He, He, He, ... +EA = ["X", "X0"] # , H, H1, H2, ..., He, He3, He4, ... A = [0, 0] # , 1, 1, 2, ..., 4, 3, 4, ... 
masses = ["0", "0"] # , 1.0078, 1.0078, 2.014, ..., 4.0026, 3.016, 4.0026, ...V uncertain_value = re.compile(r"""(?P[\d.]+)(?P\([\d#]+\))?""") -aliases = {'D': 'H2', 'T': 'H3'} +aliases = {"D": "H2", "T": "H3"} -newnames = {'Uut': 'Nh', 'Uup': 'Mc', 'Uus': 'Ts'} -for delem in atomic_weights_data['data']: - symbol = delem['Atomic Symbol'] - delem['Atomic Symbol'] = newnames.get(symbol, symbol) - for diso in delem['isotopes']: - symbol = diso['Atomic Symbol'] - diso['Atomic Symbol'] = newnames.get(symbol, symbol) +newnames = {"Uut": "Nh", "Uup": "Mc", "Uus": "Ts"} +for delem in atomic_weights_data["data"]: + symbol = delem["Atomic Symbol"] + delem["Atomic Symbol"] = newnames.get(symbol, symbol) + for diso in delem["isotopes"]: + symbol = diso["Atomic Symbol"] + diso["Atomic Symbol"] = newnames.get(symbol, symbol) # element loop -for delem in atomic_weights_data['data']: +for delem in atomic_weights_data["data"]: mass_of_most_common_isotope = None mass_number_of_most_common_isotope = None max_isotopic_contribution = 0.0 # isotope loop - for diso in delem['isotopes']: - mobj = re.match(uncertain_value, diso['Relative Atomic Mass']) + for diso in delem["isotopes"]: + mobj = re.match(uncertain_value, diso["Relative Atomic Mass"]) if mobj: - mass = mobj.group('value') + mass = mobj.group("value") else: - raise ValueError('Trouble parsing mass string ({}) for element ({})'.format( - diso['Relative Atomic Mass'], diso['Atomic Symbol'])) + raise ValueError( + "Trouble parsing mass string ({}) for element ({})".format( + diso["Relative Atomic Mass"], diso["Atomic Symbol"] + ) + ) - a = int(diso['Mass Number']) + a = int(diso["Mass Number"]) - if diso['Atomic Symbol'] in aliases: - _EE.append('H') - EA.append(aliases[diso['Atomic Symbol']]) + if diso["Atomic Symbol"] in aliases: + _EE.append("H") + EA.append(aliases[diso["Atomic Symbol"]]) A.append(a) masses.append(mass) - _EE.append('H') - EA.append(diso['Atomic Symbol']) + _EE.append("H") + EA.append(diso["Atomic Symbol"]) 
A.append(a) masses.append(mass) else: - _EE.append(diso['Atomic Symbol']) - EA.append(diso['Atomic Symbol'] + diso['Mass Number']) + _EE.append(diso["Atomic Symbol"]) + EA.append(diso["Atomic Symbol"] + diso["Mass Number"]) A.append(a) masses.append(mass) - if 'Isotopic Composition' in diso: - mobj = re.match(uncertain_value, diso['Isotopic Composition']) + if "Isotopic Composition" in diso: + mobj = re.match(uncertain_value, diso["Isotopic Composition"]) if mobj: - if float(mobj.group('value')) > max_isotopic_contribution: + if float(mobj.group("value")) > max_isotopic_contribution: mass_of_most_common_isotope = mass mass_number_of_most_common_isotope = a - max_isotopic_contribution = float(mobj.group('value')) + max_isotopic_contribution = float(mobj.group("value")) # Source atomic_weights_and_isotopic_compositions_for_all_elements deals with isotopic composition of # stable elements. For unstable elements, need another source for the longest-lived isotope. if mass_of_most_common_isotope is None: - mass_number_of_most_common_isotope = longest_lived_isotope_for_unstable_elements[diso['Atomic Symbol']] - eliso = delem['Atomic Symbol'] + str(mass_number_of_most_common_isotope) + mass_number_of_most_common_isotope = longest_lived_isotope_for_unstable_elements[diso["Atomic Symbol"]] + eliso = delem["Atomic Symbol"] + str(mass_number_of_most_common_isotope) mass_of_most_common_isotope = masses[EA.index(eliso)] - _EE.append(delem['Atomic Symbol']) - EA.append(delem['Atomic Symbol']) + _EE.append(delem["Atomic Symbol"]) + EA.append(delem["Atomic Symbol"]) A.append(mass_number_of_most_common_isotope) masses.append(mass_of_most_common_isotope) - z = int(delem['Atomic Number']) + z = int(delem["Atomic Number"]) Z.append(z) - E.append(delem['Atomic Symbol']) + E.append(delem["Atomic Symbol"]) name.append(element_names[z - 1].capitalize()) atomic_weights_json["Z"] = Z diff --git a/raw_data/nist_data/build_physical_constants.py b/raw_data/nist_data/build_physical_constants.py 
index f53dffda..772d598f 100644 --- a/raw_data/nist_data/build_physical_constants.py +++ b/raw_data/nist_data/build_physical_constants.py @@ -2,8 +2,9 @@ This file will generate a JSON blob usable by QCElemental for physical constants """ -import json import datetime +import json + import requests from yapf.yapflib.yapf_api import FormatCode @@ -13,9 +14,9 @@ title = metadata["title"] date_modified = metadata["modified"] -year = date_modified.split('-')[0] -doi = metadata['distribution'][-1]['accessURL'].strip('https://dx.doi.org/') -url = metadata['distribution'][0]['downloadURL'] +year = date_modified.split("-")[0] +doi = metadata["distribution"][-1]["accessURL"].strip("https://dx.doi.org/") +url = metadata["distribution"][0]["downloadURL"] access_date = str(datetime.datetime.utcnow()) constants = requests.get(url).json() @@ -33,7 +34,9 @@ """ -'''.format(year, title, date_modified, doi, url, access_date) +'''.format( + year, title, date_modified, doi, url, access_date +) constants_json = { "title": title, @@ -41,20 +44,20 @@ "doi": doi, "url": url, "access_data": access_date, - "constants": {} + "constants": {}, } -for pc in constants['constant']: - value = pc['Value'].strip() - uncertainty = pc['Uncertainty'] - if uncertainty == '(exact)': - value = value.replace('...', '') +for pc in constants["constant"]: + value = pc["Value"].strip() + uncertainty = pc["Uncertainty"] + if uncertainty == "(exact)": + value = value.replace("...", "") constants_json["constants"][pc["Quantity "].lower()] = { "quantity": pc["Quantity "], "unit": pc["Unit"], "value": value.replace(" ", ""), - 'uncertainty': uncertainty + "uncertainty": uncertainty, } output += "nist_{}_codata = {}".format(year, constants_json) diff --git a/scripts/build_docs.sh b/scripts/build_docs.sh new file mode 100644 index 00000000..a6e06e10 --- /dev/null +++ b/scripts/build_docs.sh @@ -0,0 +1,3 @@ +set -xe +# Run tests +poetry run sphinx-build docs/ build/docs diff --git a/scripts/format.sh 
b/scripts/format.sh new file mode 100644 index 00000000..c7bbc692 --- /dev/null +++ b/scripts/format.sh @@ -0,0 +1,7 @@ +#!/bin/sh -e + +set -x + +poetry run autoflake --remove-all-unused-imports --recursive --remove-unused-variables --in-place . --exclude=__init__.py +poetry run black . +poetry run isort . diff --git a/scripts/test.sh b/scripts/test.sh new file mode 100644 index 00000000..862bf8c4 --- /dev/null +++ b/scripts/test.sh @@ -0,0 +1,3 @@ +set -xe +# Run tests +poetry run pytest --cov-report html:htmlcov --cov diff --git a/setup.cfg b/setup.cfg index 6a39f7bd..476edff4 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,76 +1,10 @@ -# Helper file to handle all configs - -[coverage:run] -# .coveragerc to control coverage.py and pytest-cov -# Omit the test directory from test coverage -omit = - */tests/* - qcelemental/_version.py - -[tool:isort] -line_length=120 -skip_glob = - */data/nist* -skip=__init__.py -include_trailing_comma=True -force_grid_wrap=0 -use_parentheses=True -multi_line_output=3 - - -[yapf] -# YAPF, in .style.yapf files this shows up as "[style]" header -COLUMN_LIMIT = 119 -INDENT_WIDTH = 4 -USE_TABS = False - [flake8] -# Flake8, PyFlakes, etc -max-line-length = 119 - -[versioneer] -# Automatic version numbering scheme -VCS = git -style = pep440 -versionfile_source = qcelemental/_version.py -versionfile_build = qcelemental/_version.py -tag_prefix = '' - -[aliases] -test=pytest - -[mypy] - -[mypy-numpy] -ignore_missing_imports = True - -[mypy-numpy.*] -ignore_missing_imports = True - -[mypy-pytest] -ignore_missing_imports = True - -[mypy-networkx] -ignore_missing_imports = True - -[mypy-pint] -ignore_missing_imports = True - -[mypy-mpmath] -ignore_missing_imports = True - -[mypy-scipy] -ignore_missing_imports = True - -[mypy-msgpack] -ignore_missing_imports = True - -[mypy-qcelemental._version] -ignore_errors = True - -[mypy-qcelemental.models.molecule] -# Disable all the Optional settings in Molecule since many are handled by the to_schema 
function -strict_optional = False - -[mypy-qcelemental.tests.*] -ignore_errors = True +ignore = E203, E266, E501, W503 +max-line-length = 120 +max-complexity = 18 +select = B,C,E,F,W,T4 +exclude = + # No need to traverse our git directory + .git, + # There's no value in checking cache directories + __pycache__ diff --git a/setup.py b/setup.py deleted file mode 100644 index 723504d7..00000000 --- a/setup.py +++ /dev/null @@ -1,78 +0,0 @@ -import os -import sys -import setuptools -import versioneer - -short_description = "QCElemental is a resource module for quantum chemistry containing physical" -"constants and periodic table data from NIST and molecule handlers." - -# from https://github.com/pytest-dev/pytest-runner#conditional-requirement -needs_pytest = {'pytest', 'test', 'ptr'}.intersection(sys.argv) -pytest_runner = ['pytest-runner'] if needs_pytest else [] - -try: - with open("README.md", "r") as handle: - long_description = handle.read() -except FileNotFoundError: - long_description = short_description - -if __name__ == "__main__": - setuptools.setup( - name='qcelemental', - description='Essentials for Quantum Chemistry.', - author='The QCArchive Development Team', - author_email='qcarchive@molssi.org', - url="https://github.com/MolSSI/QCElemental", - license='BSD-3C', - version=versioneer.get_version(), - cmdclass=versioneer.get_cmdclass(), - packages=setuptools.find_packages(exclude=['*checkup*']), - include_package_data=True, - package_data={'': [os.path.join('qcelemental', 'data', '*.json')]}, - setup_requires=[] + pytest_runner, - python_requires='>=3.6', - install_requires=["numpy >= 1.12.0", "pint >= 0.10.0", "pydantic >=1.8.2"], - extras_require={ - 'docs': [ - 'numpydoc', - 'sphinx', # autodoc was broken in 1.3.1 - 'sphinxcontrib-napoleon', - 'sphinx_rtd_theme', - "autodoc-pydantic", - ], - 'tests': [ - 'pytest >= 4.0.0', - 'pytest-cov', - # 'jsonschema', # needed for speciality `pytest --validate` - ], - 'align': [ - 'networkx>=2.4.0', - ], - 'viz': 
[ - 'nglview', - ], - 'lint': [ - 'autoflake', - 'black', - 'isort', - ], - }, - tests_require=[ - 'pytest >= 4.0.0', - 'pytest-cov', - # 'jsonschema', # needed for speciality `pytest --validate` - ], - classifiers=[ - 'Development Status :: 4 - Beta', - 'Intended Audience :: Science/Research', - 'Programming Language :: Python :: 3 :: Only', - 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.6', - 'Programming Language :: Python :: 3.7', - 'Programming Language :: Python :: 3.8', - 'Programming Language :: Python :: 3.9', - 'Programming Language :: Python :: 3.10', - ], - zip_safe=False, - long_description=long_description, - long_description_content_type="text/markdown") diff --git a/versioneer.py b/versioneer.py deleted file mode 100644 index 64fea1c8..00000000 --- a/versioneer.py +++ /dev/null @@ -1,1822 +0,0 @@ - -# Version: 0.18 - -"""The Versioneer - like a rocketeer, but for versions. - -The Versioneer -============== - -* like a rocketeer, but for versions! -* https://github.com/warner/python-versioneer -* Brian Warner -* License: Public Domain -* Compatible With: python2.6, 2.7, 3.2, 3.3, 3.4, 3.5, 3.6, and pypy -* [![Latest Version] -(https://pypip.in/version/versioneer/badge.svg?style=flat) -](https://pypi.python.org/pypi/versioneer/) -* [![Build Status] -(https://travis-ci.org/warner/python-versioneer.png?branch=master) -](https://travis-ci.org/warner/python-versioneer) - -This is a tool for managing a recorded version number in distutils-based -python projects. The goal is to remove the tedious and error-prone "update -the embedded version string" step from your release process. Making a new -release should be as easy as recording a new tag in your version-control -system, and maybe making new tarballs. 
- - -## Quick Install - -* `pip install versioneer` to somewhere to your $PATH -* add a `[versioneer]` section to your setup.cfg (see below) -* run `versioneer install` in your source tree, commit the results - -## Version Identifiers - -Source trees come from a variety of places: - -* a version-control system checkout (mostly used by developers) -* a nightly tarball, produced by build automation -* a snapshot tarball, produced by a web-based VCS browser, like github's - "tarball from tag" feature -* a release tarball, produced by "setup.py sdist", distributed through PyPI - -Within each source tree, the version identifier (either a string or a number, -this tool is format-agnostic) can come from a variety of places: - -* ask the VCS tool itself, e.g. "git describe" (for checkouts), which knows - about recent "tags" and an absolute revision-id -* the name of the directory into which the tarball was unpacked -* an expanded VCS keyword ($Id$, etc) -* a `_version.py` created by some earlier build step - -For released software, the version identifier is closely related to a VCS -tag. Some projects use tag names that include more than just the version -string (e.g. "myproject-1.2" instead of just "1.2"), in which case the tool -needs to strip the tag prefix to extract the version identifier. For -unreleased software (between tags), the version identifier should provide -enough information to help developers recreate the same tree, while also -giving them an idea of roughly how old the tree is (after version 1.2, before -version 1.3). Many VCS systems can report a description that captures this, -for example `git describe --tags --dirty --always` reports things like -"0.7-1-g574ab98-dirty" to indicate that the checkout is one revision past the -0.7 tag, has a unique revision id of "574ab98", and is "dirty" (it has -uncommitted changes. 
- -The version identifier is used for multiple purposes: - -* to allow the module to self-identify its version: `myproject.__version__` -* to choose a name and prefix for a 'setup.py sdist' tarball - -## Theory of Operation - -Versioneer works by adding a special `_version.py` file into your source -tree, where your `__init__.py` can import it. This `_version.py` knows how to -dynamically ask the VCS tool for version information at import time. - -`_version.py` also contains `$Revision$` markers, and the installation -process marks `_version.py` to have this marker rewritten with a tag name -during the `git archive` command. As a result, generated tarballs will -contain enough information to get the proper version. - -To allow `setup.py` to compute a version too, a `versioneer.py` is added to -the top level of your source tree, next to `setup.py` and the `setup.cfg` -that configures it. This overrides several distutils/setuptools commands to -compute the version when invoked, and changes `setup.py build` and `setup.py -sdist` to replace `_version.py` with a small static file that contains just -the generated version data. - -## Installation - -See [INSTALL.md](./INSTALL.md) for detailed installation instructions. - -## Version-String Flavors - -Code which uses Versioneer can learn about its version string at runtime by -importing `_version` from your main `__init__.py` file and running the -`get_versions()` function. From the "outside" (e.g. in `setup.py`), you can -import the top-level `versioneer.py` and run `get_versions()`. - -Both functions return a dictionary with different flavors of version -information: - -* `['version']`: A condensed version string, rendered using the selected - style. This is the most commonly used value for the project's version - string. The default "pep440" style yields strings like `0.11`, - `0.11+2.g1076c97`, or `0.11+2.g1076c97.dirty`. See the "Styles" section - below for alternative styles. 
- -* `['full-revisionid']`: detailed revision identifier. For Git, this is the - full SHA1 commit id, e.g. "1076c978a8d3cfc70f408fe5974aa6c092c949ac". - -* `['date']`: Date and time of the latest `HEAD` commit. For Git, it is the - commit date in ISO 8601 format. This will be None if the date is not - available. - -* `['dirty']`: a boolean, True if the tree has uncommitted changes. Note that - this is only accurate if run in a VCS checkout, otherwise it is likely to - be False or None - -* `['error']`: if the version string could not be computed, this will be set - to a string describing the problem, otherwise it will be None. It may be - useful to throw an exception in setup.py if this is set, to avoid e.g. - creating tarballs with a version string of "unknown". - -Some variants are more useful than others. Including `full-revisionid` in a -bug report should allow developers to reconstruct the exact code being tested -(or indicate the presence of local changes that should be shared with the -developers). `version` is suitable for display in an "about" box or a CLI -`--version` output: it can be easily compared against release notes and lists -of bugs fixed in various releases. - -The installer adds the following text to your `__init__.py` to place a basic -version in `YOURPROJECT.__version__`: - - from ._version import get_versions - __version__ = get_versions()['version'] - del get_versions - -## Styles - -The setup.cfg `style=` configuration controls how the VCS information is -rendered into a version string. - -The default style, "pep440", produces a PEP440-compliant string, equal to the -un-prefixed tag name for actual releases, and containing an additional "local -version" section with more detail for in-between builds. For Git, this is -TAG[+DISTANCE.gHEX[.dirty]] , using information from `git describe --tags ---dirty --always`. 
For example "0.11+2.g1076c97.dirty" indicates that the -tree is like the "1076c97" commit but has uncommitted changes (".dirty"), and -that this commit is two revisions ("+2") beyond the "0.11" tag. For released -software (exactly equal to a known tag), the identifier will only contain the -stripped tag, e.g. "0.11". - -Other styles are available. See [details.md](details.md) in the Versioneer -source tree for descriptions. - -## Debugging - -Versioneer tries to avoid fatal errors: if something goes wrong, it will tend -to return a version of "0+unknown". To investigate the problem, run `setup.py -version`, which will run the version-lookup code in a verbose mode, and will -display the full contents of `get_versions()` (including the `error` string, -which may help identify what went wrong). - -## Known Limitations - -Some situations are known to cause problems for Versioneer. This details the -most significant ones. More can be found on Github -[issues page](https://github.com/warner/python-versioneer/issues). - -### Subprojects - -Versioneer has limited support for source trees in which `setup.py` is not in -the root directory (e.g. `setup.py` and `.git/` are *not* siblings). The are -two common reasons why `setup.py` might not be in the root: - -* Source trees which contain multiple subprojects, such as - [Buildbot](https://github.com/buildbot/buildbot), which contains both - "master" and "slave" subprojects, each with their own `setup.py`, - `setup.cfg`, and `tox.ini`. Projects like these produce multiple PyPI - distributions (and upload multiple independently-installable tarballs). -* Source trees whose main purpose is to contain a C library, but which also - provide bindings to Python (and perhaps other langauges) in subdirectories. - -Versioneer will look for `.git` in parent directories, and most operations -should get the right version string. 
However `pip` and `setuptools` have bugs -and implementation details which frequently cause `pip install .` from a -subproject directory to fail to find a correct version string (so it usually -defaults to `0+unknown`). - -`pip install --editable .` should work correctly. `setup.py install` might -work too. - -Pip-8.1.1 is known to have this problem, but hopefully it will get fixed in -some later version. - -[Bug #38](https://github.com/warner/python-versioneer/issues/38) is tracking -this issue. The discussion in -[PR #61](https://github.com/warner/python-versioneer/pull/61) describes the -issue from the Versioneer side in more detail. -[pip PR#3176](https://github.com/pypa/pip/pull/3176) and -[pip PR#3615](https://github.com/pypa/pip/pull/3615) contain work to improve -pip to let Versioneer work correctly. - -Versioneer-0.16 and earlier only looked for a `.git` directory next to the -`setup.cfg`, so subprojects were completely unsupported with those releases. - -### Editable installs with setuptools <= 18.5 - -`setup.py develop` and `pip install --editable .` allow you to install a -project into a virtualenv once, then continue editing the source code (and -test) without re-installing after every change. - -"Entry-point scripts" (`setup(entry_points={"console_scripts": ..})`) are a -convenient way to specify executable scripts that should be installed along -with the python package. - -These both work as expected when using modern setuptools. When using -setuptools-18.5 or earlier, however, certain operations will cause -`pkg_resources.DistributionNotFound` errors when running the entrypoint -script, which must be resolved by re-installing the package. This happens -when the install happens with one version, then the egg_info data is -regenerated while a different version is checked out. Many setup.py commands -cause egg_info to be rebuilt (including `sdist`, `wheel`, and installing into -a different virtualenv), so this can be surprising. 
-
-[Bug #83](https://github.com/warner/python-versioneer/issues/83) describes
-this one, but upgrading to a newer version of setuptools should probably
-resolve it.
-
-### Unicode version strings
-
-While Versioneer works (and is continually tested) with both Python 2 and
-Python 3, it is not entirely consistent with bytes-vs-unicode distinctions.
-Newer releases probably generate unicode version strings on py2. It's not
-clear that this is wrong, but it may be surprising for applications when they
-write these strings to a network connection or include them in bytes-oriented
-APIs like cryptographic checksums.
-
-[Bug #71](https://github.com/warner/python-versioneer/issues/71) investigates
-this question.
-
-
-## Updating Versioneer
-
-To upgrade your project to a new release of Versioneer, do the following:
-
-* install the new Versioneer (`pip install -U versioneer` or equivalent)
-* edit `setup.cfg`, if necessary, to include any new configuration settings
-  indicated by the release notes. See [UPGRADING](./UPGRADING.md) for details.
-* re-run `versioneer install` in your source tree, to replace
-  `SRC/_version.py`
-* commit any changed files
-
-## Future Directions
-
-This tool is designed to make it easily extended to other version-control
-systems: all VCS-specific components are in separate directories like
-src/git/ . The top-level `versioneer.py` script is assembled from these
-components by running make-versioneer.py . In the future, make-versioneer.py
-will take a VCS name as an argument, and will construct a version of
-`versioneer.py` that is specific to the given VCS. It might also take the
-configuration arguments that are currently provided manually during
-installation by editing setup.py . Alternatively, it might go the other
-direction and include code from all supported VCS systems, reducing the
-number of intermediate scripts.
-
-
-## License
-
-To make Versioneer easier to embed, all its code is dedicated to the public
-domain. 
The `_version.py` that it creates is also in the public domain. -Specifically, both are released under the Creative Commons "Public Domain -Dedication" license (CC0-1.0), as described in -https://creativecommons.org/publicdomain/zero/1.0/ . - -""" - -from __future__ import print_function -try: - import configparser -except ImportError: - import ConfigParser as configparser -import errno -import json -import os -import re -import subprocess -import sys - - -class VersioneerConfig: - """Container for Versioneer configuration parameters.""" - - -def get_root(): - """Get the project root directory. - - We require that all commands are run from the project root, i.e. the - directory that contains setup.py, setup.cfg, and versioneer.py . - """ - root = os.path.realpath(os.path.abspath(os.getcwd())) - setup_py = os.path.join(root, "setup.py") - versioneer_py = os.path.join(root, "versioneer.py") - if not (os.path.exists(setup_py) or os.path.exists(versioneer_py)): - # allow 'python path/to/setup.py COMMAND' - root = os.path.dirname(os.path.realpath(os.path.abspath(sys.argv[0]))) - setup_py = os.path.join(root, "setup.py") - versioneer_py = os.path.join(root, "versioneer.py") - if not (os.path.exists(setup_py) or os.path.exists(versioneer_py)): - err = ("Versioneer was unable to run the project root directory. " - "Versioneer requires setup.py to be executed from " - "its immediate directory (like 'python setup.py COMMAND'), " - "or in a way that lets it use sys.argv[0] to find the root " - "(like 'python path/to/setup.py COMMAND').") - raise VersioneerBadRootError(err) - try: - # Certain runtime workflows (setup.py install/develop in a setuptools - # tree) execute all dependencies in a single python process, so - # "versioneer" may be imported multiple times, and python's shared - # module-import table will cache the first one. So we can't use - # os.path.dirname(__file__), as that will find whichever - # versioneer.py was first imported, even in later projects. 
- me = os.path.realpath(os.path.abspath(__file__)) - me_dir = os.path.normcase(os.path.splitext(me)[0]) - vsr_dir = os.path.normcase(os.path.splitext(versioneer_py)[0]) - if me_dir != vsr_dir: - print("Warning: build in %s is using versioneer.py from %s" - % (os.path.dirname(me), versioneer_py)) - except NameError: - pass - return root - - -def get_config_from_root(root): - """Read the project setup.cfg file to determine Versioneer config.""" - # This might raise EnvironmentError (if setup.cfg is missing), or - # configparser.NoSectionError (if it lacks a [versioneer] section), or - # configparser.NoOptionError (if it lacks "VCS="). See the docstring at - # the top of versioneer.py for instructions on writing your setup.cfg . - setup_cfg = os.path.join(root, "setup.cfg") - parser = configparser.SafeConfigParser() - with open(setup_cfg, "r") as f: - parser.readfp(f) - VCS = parser.get("versioneer", "VCS") # mandatory - - def get(parser, name): - if parser.has_option("versioneer", name): - return parser.get("versioneer", name) - return None - cfg = VersioneerConfig() - cfg.VCS = VCS - cfg.style = get(parser, "style") or "" - cfg.versionfile_source = get(parser, "versionfile_source") - cfg.versionfile_build = get(parser, "versionfile_build") - cfg.tag_prefix = get(parser, "tag_prefix") - if cfg.tag_prefix in ("''", '""'): - cfg.tag_prefix = "" - cfg.parentdir_prefix = get(parser, "parentdir_prefix") - cfg.verbose = get(parser, "verbose") - return cfg - - -class NotThisMethod(Exception): - """Exception raised if a method is not valid for the current scenario.""" - - -# these dictionaries contain VCS-specific tools -LONG_VERSION_PY = {} -HANDLERS = {} - - -def register_vcs_handler(vcs, method): # decorator - """Decorator to mark a method as the handler for a particular VCS.""" - def decorate(f): - """Store f in HANDLERS[vcs][method].""" - if vcs not in HANDLERS: - HANDLERS[vcs] = {} - HANDLERS[vcs][method] = f - return f - return decorate - - -def run_command(commands, 
args, cwd=None, verbose=False, hide_stderr=False, - env=None): - """Call the given command(s).""" - assert isinstance(commands, list) - p = None - for c in commands: - try: - dispcmd = str([c] + args) - # remember shell=False, so use git.cmd on windows, not just git - p = subprocess.Popen([c] + args, cwd=cwd, env=env, - stdout=subprocess.PIPE, - stderr=(subprocess.PIPE if hide_stderr - else None)) - break - except EnvironmentError: - e = sys.exc_info()[1] - if e.errno == errno.ENOENT: - continue - if verbose: - print("unable to run %s" % dispcmd) - print(e) - return None, None - else: - if verbose: - print("unable to find command, tried %s" % (commands,)) - return None, None - stdout = p.communicate()[0].strip() - if sys.version_info[0] >= 3: - stdout = stdout.decode() - if p.returncode != 0: - if verbose: - print("unable to run %s (error)" % dispcmd) - print("stdout was %s" % stdout) - return None, p.returncode - return stdout, p.returncode - - -LONG_VERSION_PY['git'] = ''' -# This file helps to compute a version number in source trees obtained from -# git-archive tarball (such as those provided by githubs download-from-tag -# feature). Distribution tarballs (built by setup.py sdist) and build -# directories (produced by setup.py build) will contain a much shorter file -# that just contains the computed version number. - -# This file is released into the public domain. Generated by -# versioneer-0.18 (https://github.com/warner/python-versioneer) - -"""Git implementation of _version.py.""" - -import errno -import os -import re -import subprocess -import sys - - -def get_keywords(): - """Get the keywords needed to look up the version information.""" - # these strings will be replaced by git during git-archive. - # setup.py/versioneer.py will grep for the variable names, so they must - # each be defined on a line of their own. _version.py will just call - # get_keywords(). 
- git_refnames = "%(DOLLAR)sFormat:%%d%(DOLLAR)s" - git_full = "%(DOLLAR)sFormat:%%H%(DOLLAR)s" - git_date = "%(DOLLAR)sFormat:%%ci%(DOLLAR)s" - keywords = {"refnames": git_refnames, "full": git_full, "date": git_date} - return keywords - - -class VersioneerConfig: - """Container for Versioneer configuration parameters.""" - - -def get_config(): - """Create, populate and return the VersioneerConfig() object.""" - # these strings are filled in when 'setup.py versioneer' creates - # _version.py - cfg = VersioneerConfig() - cfg.VCS = "git" - cfg.style = "%(STYLE)s" - cfg.tag_prefix = "%(TAG_PREFIX)s" - cfg.parentdir_prefix = "%(PARENTDIR_PREFIX)s" - cfg.versionfile_source = "%(VERSIONFILE_SOURCE)s" - cfg.verbose = False - return cfg - - -class NotThisMethod(Exception): - """Exception raised if a method is not valid for the current scenario.""" - - -LONG_VERSION_PY = {} -HANDLERS = {} - - -def register_vcs_handler(vcs, method): # decorator - """Decorator to mark a method as the handler for a particular VCS.""" - def decorate(f): - """Store f in HANDLERS[vcs][method].""" - if vcs not in HANDLERS: - HANDLERS[vcs] = {} - HANDLERS[vcs][method] = f - return f - return decorate - - -def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, - env=None): - """Call the given command(s).""" - assert isinstance(commands, list) - p = None - for c in commands: - try: - dispcmd = str([c] + args) - # remember shell=False, so use git.cmd on windows, not just git - p = subprocess.Popen([c] + args, cwd=cwd, env=env, - stdout=subprocess.PIPE, - stderr=(subprocess.PIPE if hide_stderr - else None)) - break - except EnvironmentError: - e = sys.exc_info()[1] - if e.errno == errno.ENOENT: - continue - if verbose: - print("unable to run %%s" %% dispcmd) - print(e) - return None, None - else: - if verbose: - print("unable to find command, tried %%s" %% (commands,)) - return None, None - stdout = p.communicate()[0].strip() - if sys.version_info[0] >= 3: - stdout = 
stdout.decode() - if p.returncode != 0: - if verbose: - print("unable to run %%s (error)" %% dispcmd) - print("stdout was %%s" %% stdout) - return None, p.returncode - return stdout, p.returncode - - -def versions_from_parentdir(parentdir_prefix, root, verbose): - """Try to determine the version from the parent directory name. - - Source tarballs conventionally unpack into a directory that includes both - the project name and a version string. We will also support searching up - two directory levels for an appropriately named parent directory - """ - rootdirs = [] - - for i in range(3): - dirname = os.path.basename(root) - if dirname.startswith(parentdir_prefix): - return {"version": dirname[len(parentdir_prefix):], - "full-revisionid": None, - "dirty": False, "error": None, "date": None} - else: - rootdirs.append(root) - root = os.path.dirname(root) # up a level - - if verbose: - print("Tried directories %%s but none started with prefix %%s" %% - (str(rootdirs), parentdir_prefix)) - raise NotThisMethod("rootdir doesn't start with parentdir_prefix") - - -@register_vcs_handler("git", "get_keywords") -def git_get_keywords(versionfile_abs): - """Extract version information from the given file.""" - # the code embedded in _version.py can just fetch the value of these - # keywords. When used from setup.py, we don't want to import _version.py, - # so we do it with a regexp instead. This function is not used from - # _version.py. 
- keywords = {} - try: - f = open(versionfile_abs, "r") - for line in f.readlines(): - if line.strip().startswith("git_refnames ="): - mo = re.search(r'=\s*"(.*)"', line) - if mo: - keywords["refnames"] = mo.group(1) - if line.strip().startswith("git_full ="): - mo = re.search(r'=\s*"(.*)"', line) - if mo: - keywords["full"] = mo.group(1) - if line.strip().startswith("git_date ="): - mo = re.search(r'=\s*"(.*)"', line) - if mo: - keywords["date"] = mo.group(1) - f.close() - except EnvironmentError: - pass - return keywords - - -@register_vcs_handler("git", "keywords") -def git_versions_from_keywords(keywords, tag_prefix, verbose): - """Get version information from git keywords.""" - if not keywords: - raise NotThisMethod("no keywords at all, weird") - date = keywords.get("date") - if date is not None: - # git-2.2.0 added "%%cI", which expands to an ISO-8601 -compliant - # datestamp. However we prefer "%%ci" (which expands to an "ISO-8601 - # -like" string, which we must then edit to make compliant), because - # it's been around since git-1.5.3, and it's too difficult to - # discover which version we're using, or to work around using an - # older one. - date = date.strip().replace(" ", "T", 1).replace(" ", "", 1) - refnames = keywords["refnames"].strip() - if refnames.startswith("$Format"): - if verbose: - print("keywords are unexpanded, not using") - raise NotThisMethod("unexpanded keywords, not a git-archive tarball") - refs = set([r.strip() for r in refnames.strip("()").split(",")]) - # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of - # just "foo-1.0". If we see a "tag: " prefix, prefer those. - TAG = "tag: " - tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)]) - if not tags: - # Either we're using git < 1.8.3, or there really are no tags. We use - # a heuristic: assume all version tags have a digit. 
The old git %%d - # expansion behaves like git log --decorate=short and strips out the - # refs/heads/ and refs/tags/ prefixes that would let us distinguish - # between branches and tags. By ignoring refnames without digits, we - # filter out many common branch names like "release" and - # "stabilization", as well as "HEAD" and "master". - tags = set([r for r in refs if re.search(r'\d', r)]) - if verbose: - print("discarding '%%s', no digits" %% ",".join(refs - tags)) - if verbose: - print("likely tags: %%s" %% ",".join(sorted(tags))) - for ref in sorted(tags): - # sorting will prefer e.g. "2.0" over "2.0rc1" - if ref.startswith(tag_prefix): - r = ref[len(tag_prefix):] - if verbose: - print("picking %%s" %% r) - return {"version": r, - "full-revisionid": keywords["full"].strip(), - "dirty": False, "error": None, - "date": date} - # no suitable tags, so version is "0+unknown", but full hex is still there - if verbose: - print("no suitable tags, using unknown + full revision id") - return {"version": "0+unknown", - "full-revisionid": keywords["full"].strip(), - "dirty": False, "error": "no suitable tags", "date": None} - - -@register_vcs_handler("git", "pieces_from_vcs") -def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): - """Get version from 'git describe' in the root of the source tree. - - This only gets called if the git-archive 'subst' keywords were *not* - expanded, and _version.py hasn't already been rewritten with a short - version string, meaning we're inside a checked out source tree. 
- """ - GITS = ["git"] - if sys.platform == "win32": - GITS = ["git.cmd", "git.exe"] - - out, rc = run_command(GITS, ["rev-parse", "--git-dir"], cwd=root, - hide_stderr=True) - if rc != 0: - if verbose: - print("Directory %%s not under git control" %% root) - raise NotThisMethod("'git rev-parse --git-dir' returned error") - - # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty] - # if there isn't one, this yields HEX[-dirty] (no NUM) - describe_out, rc = run_command(GITS, ["describe", "--tags", "--dirty", - "--always", "--long", - "--match", "%%s*" %% tag_prefix], - cwd=root) - # --long was added in git-1.5.5 - if describe_out is None: - raise NotThisMethod("'git describe' failed") - describe_out = describe_out.strip() - full_out, rc = run_command(GITS, ["rev-parse", "HEAD"], cwd=root) - if full_out is None: - raise NotThisMethod("'git rev-parse' failed") - full_out = full_out.strip() - - pieces = {} - pieces["long"] = full_out - pieces["short"] = full_out[:7] # maybe improved later - pieces["error"] = None - - # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty] - # TAG might have hyphens. - git_describe = describe_out - - # look for -dirty suffix - dirty = git_describe.endswith("-dirty") - pieces["dirty"] = dirty - if dirty: - git_describe = git_describe[:git_describe.rindex("-dirty")] - - # now we have TAG-NUM-gHEX or HEX - - if "-" in git_describe: - # TAG-NUM-gHEX - mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe) - if not mo: - # unparseable. Maybe git-describe is misbehaving? 
- pieces["error"] = ("unable to parse git-describe output: '%%s'" - %% describe_out) - return pieces - - # tag - full_tag = mo.group(1) - if not full_tag.startswith(tag_prefix): - if verbose: - fmt = "tag '%%s' doesn't start with prefix '%%s'" - print(fmt %% (full_tag, tag_prefix)) - pieces["error"] = ("tag '%%s' doesn't start with prefix '%%s'" - %% (full_tag, tag_prefix)) - return pieces - pieces["closest-tag"] = full_tag[len(tag_prefix):] - - # distance: number of commits since tag - pieces["distance"] = int(mo.group(2)) - - # commit: short hex revision ID - pieces["short"] = mo.group(3) - - else: - # HEX: no tags - pieces["closest-tag"] = None - count_out, rc = run_command(GITS, ["rev-list", "HEAD", "--count"], - cwd=root) - pieces["distance"] = int(count_out) # total number of commits - - # commit date: see ISO-8601 comment in git_versions_from_keywords() - date = run_command(GITS, ["show", "-s", "--format=%%ci", "HEAD"], - cwd=root)[0].strip() - pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1) - - return pieces - - -def plus_or_dot(pieces): - """Return a + if we don't already have one, else return a .""" - if "+" in pieces.get("closest-tag", ""): - return "." - return "+" - - -def render_pep440(pieces): - """Build up version string, with post-release "local version identifier". - - Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you - get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty - - Exceptions: - 1: no tags. git_describe was just HEX. 
0+untagged.DISTANCE.gHEX[.dirty] - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"] or pieces["dirty"]: - rendered += plus_or_dot(pieces) - rendered += "%%d.g%%s" %% (pieces["distance"], pieces["short"]) - if pieces["dirty"]: - rendered += ".dirty" - else: - # exception #1 - rendered = "0+untagged.%%d.g%%s" %% (pieces["distance"], - pieces["short"]) - if pieces["dirty"]: - rendered += ".dirty" - return rendered - - -def render_pep440_pre(pieces): - """TAG[.post.devDISTANCE] -- No -dirty. - - Exceptions: - 1: no tags. 0.post.devDISTANCE - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"]: - rendered += ".post.dev%%d" %% pieces["distance"] - else: - # exception #1 - rendered = "0.post.dev%%d" %% pieces["distance"] - return rendered - - -def render_pep440_post(pieces): - """TAG[.postDISTANCE[.dev0]+gHEX] . - - The ".dev0" means dirty. Note that .dev0 sorts backwards - (a dirty tree will appear "older" than the corresponding clean one), - but you shouldn't be releasing software with -dirty anyways. - - Exceptions: - 1: no tags. 0.postDISTANCE[.dev0] - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"] or pieces["dirty"]: - rendered += ".post%%d" %% pieces["distance"] - if pieces["dirty"]: - rendered += ".dev0" - rendered += plus_or_dot(pieces) - rendered += "g%%s" %% pieces["short"] - else: - # exception #1 - rendered = "0.post%%d" %% pieces["distance"] - if pieces["dirty"]: - rendered += ".dev0" - rendered += "+g%%s" %% pieces["short"] - return rendered - - -def render_pep440_old(pieces): - """TAG[.postDISTANCE[.dev0]] . - - The ".dev0" means dirty. - - Eexceptions: - 1: no tags. 
0.postDISTANCE[.dev0] - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"] or pieces["dirty"]: - rendered += ".post%%d" %% pieces["distance"] - if pieces["dirty"]: - rendered += ".dev0" - else: - # exception #1 - rendered = "0.post%%d" %% pieces["distance"] - if pieces["dirty"]: - rendered += ".dev0" - return rendered - - -def render_git_describe(pieces): - """TAG[-DISTANCE-gHEX][-dirty]. - - Like 'git describe --tags --dirty --always'. - - Exceptions: - 1: no tags. HEX[-dirty] (note: no 'g' prefix) - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"]: - rendered += "-%%d-g%%s" %% (pieces["distance"], pieces["short"]) - else: - # exception #1 - rendered = pieces["short"] - if pieces["dirty"]: - rendered += "-dirty" - return rendered - - -def render_git_describe_long(pieces): - """TAG-DISTANCE-gHEX[-dirty]. - - Like 'git describe --tags --dirty --always -long'. - The distance/hash is unconditional. - - Exceptions: - 1: no tags. 
HEX[-dirty] (note: no 'g' prefix) - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - rendered += "-%%d-g%%s" %% (pieces["distance"], pieces["short"]) - else: - # exception #1 - rendered = pieces["short"] - if pieces["dirty"]: - rendered += "-dirty" - return rendered - - -def render(pieces, style): - """Render the given version pieces into the requested style.""" - if pieces["error"]: - return {"version": "unknown", - "full-revisionid": pieces.get("long"), - "dirty": None, - "error": pieces["error"], - "date": None} - - if not style or style == "default": - style = "pep440" # the default - - if style == "pep440": - rendered = render_pep440(pieces) - elif style == "pep440-pre": - rendered = render_pep440_pre(pieces) - elif style == "pep440-post": - rendered = render_pep440_post(pieces) - elif style == "pep440-old": - rendered = render_pep440_old(pieces) - elif style == "git-describe": - rendered = render_git_describe(pieces) - elif style == "git-describe-long": - rendered = render_git_describe_long(pieces) - else: - raise ValueError("unknown style '%%s'" %% style) - - return {"version": rendered, "full-revisionid": pieces["long"], - "dirty": pieces["dirty"], "error": None, - "date": pieces.get("date")} - - -def get_versions(): - """Get version information or return default if unable to do so.""" - # I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have - # __file__, we can work backwards from there to the root. Some - # py2exe/bbfreeze/non-CPython implementations don't do __file__, in which - # case we can only use expanded keywords. - - cfg = get_config() - verbose = cfg.verbose - - try: - return git_versions_from_keywords(get_keywords(), cfg.tag_prefix, - verbose) - except NotThisMethod: - pass - - try: - root = os.path.realpath(__file__) - # versionfile_source is the relative path from the top of the source - # tree (where the .git directory might live) to this file. Invert - # this to find the root from __file__. 
- for i in cfg.versionfile_source.split('/'): - root = os.path.dirname(root) - except NameError: - return {"version": "0+unknown", "full-revisionid": None, - "dirty": None, - "error": "unable to find root of source tree", - "date": None} - - try: - pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose) - return render(pieces, cfg.style) - except NotThisMethod: - pass - - try: - if cfg.parentdir_prefix: - return versions_from_parentdir(cfg.parentdir_prefix, root, verbose) - except NotThisMethod: - pass - - return {"version": "0+unknown", "full-revisionid": None, - "dirty": None, - "error": "unable to compute version", "date": None} -''' - - -@register_vcs_handler("git", "get_keywords") -def git_get_keywords(versionfile_abs): - """Extract version information from the given file.""" - # the code embedded in _version.py can just fetch the value of these - # keywords. When used from setup.py, we don't want to import _version.py, - # so we do it with a regexp instead. This function is not used from - # _version.py. - keywords = {} - try: - f = open(versionfile_abs, "r") - for line in f.readlines(): - if line.strip().startswith("git_refnames ="): - mo = re.search(r'=\s*"(.*)"', line) - if mo: - keywords["refnames"] = mo.group(1) - if line.strip().startswith("git_full ="): - mo = re.search(r'=\s*"(.*)"', line) - if mo: - keywords["full"] = mo.group(1) - if line.strip().startswith("git_date ="): - mo = re.search(r'=\s*"(.*)"', line) - if mo: - keywords["date"] = mo.group(1) - f.close() - except EnvironmentError: - pass - return keywords - - -@register_vcs_handler("git", "keywords") -def git_versions_from_keywords(keywords, tag_prefix, verbose): - """Get version information from git keywords.""" - if not keywords: - raise NotThisMethod("no keywords at all, weird") - date = keywords.get("date") - if date is not None: - # git-2.2.0 added "%cI", which expands to an ISO-8601 -compliant - # datestamp. 
However we prefer "%ci" (which expands to an "ISO-8601 - # -like" string, which we must then edit to make compliant), because - # it's been around since git-1.5.3, and it's too difficult to - # discover which version we're using, or to work around using an - # older one. - date = date.strip().replace(" ", "T", 1).replace(" ", "", 1) - refnames = keywords["refnames"].strip() - if refnames.startswith("$Format"): - if verbose: - print("keywords are unexpanded, not using") - raise NotThisMethod("unexpanded keywords, not a git-archive tarball") - refs = set([r.strip() for r in refnames.strip("()").split(",")]) - # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of - # just "foo-1.0". If we see a "tag: " prefix, prefer those. - TAG = "tag: " - tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)]) - if not tags: - # Either we're using git < 1.8.3, or there really are no tags. We use - # a heuristic: assume all version tags have a digit. The old git %d - # expansion behaves like git log --decorate=short and strips out the - # refs/heads/ and refs/tags/ prefixes that would let us distinguish - # between branches and tags. By ignoring refnames without digits, we - # filter out many common branch names like "release" and - # "stabilization", as well as "HEAD" and "master". - tags = set([r for r in refs if re.search(r'\d', r)]) - if verbose: - print("discarding '%s', no digits" % ",".join(refs - tags)) - if verbose: - print("likely tags: %s" % ",".join(sorted(tags))) - for ref in sorted(tags): - # sorting will prefer e.g. 
"2.0" over "2.0rc1" - if ref.startswith(tag_prefix): - r = ref[len(tag_prefix):] - if verbose: - print("picking %s" % r) - return {"version": r, - "full-revisionid": keywords["full"].strip(), - "dirty": False, "error": None, - "date": date} - # no suitable tags, so version is "0+unknown", but full hex is still there - if verbose: - print("no suitable tags, using unknown + full revision id") - return {"version": "0+unknown", - "full-revisionid": keywords["full"].strip(), - "dirty": False, "error": "no suitable tags", "date": None} - - -@register_vcs_handler("git", "pieces_from_vcs") -def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): - """Get version from 'git describe' in the root of the source tree. - - This only gets called if the git-archive 'subst' keywords were *not* - expanded, and _version.py hasn't already been rewritten with a short - version string, meaning we're inside a checked out source tree. - """ - GITS = ["git"] - if sys.platform == "win32": - GITS = ["git.cmd", "git.exe"] - - out, rc = run_command(GITS, ["rev-parse", "--git-dir"], cwd=root, - hide_stderr=True) - if rc != 0: - if verbose: - print("Directory %s not under git control" % root) - raise NotThisMethod("'git rev-parse --git-dir' returned error") - - # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty] - # if there isn't one, this yields HEX[-dirty] (no NUM) - describe_out, rc = run_command(GITS, ["describe", "--tags", "--dirty", - "--always", "--long", - "--match", "%s*" % tag_prefix], - cwd=root) - # --long was added in git-1.5.5 - if describe_out is None: - raise NotThisMethod("'git describe' failed") - describe_out = describe_out.strip() - full_out, rc = run_command(GITS, ["rev-parse", "HEAD"], cwd=root) - if full_out is None: - raise NotThisMethod("'git rev-parse' failed") - full_out = full_out.strip() - - pieces = {} - pieces["long"] = full_out - pieces["short"] = full_out[:7] # maybe improved later - pieces["error"] = None - - # parse 
describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty] - # TAG might have hyphens. - git_describe = describe_out - - # look for -dirty suffix - dirty = git_describe.endswith("-dirty") - pieces["dirty"] = dirty - if dirty: - git_describe = git_describe[:git_describe.rindex("-dirty")] - - # now we have TAG-NUM-gHEX or HEX - - if "-" in git_describe: - # TAG-NUM-gHEX - mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe) - if not mo: - # unparseable. Maybe git-describe is misbehaving? - pieces["error"] = ("unable to parse git-describe output: '%s'" - % describe_out) - return pieces - - # tag - full_tag = mo.group(1) - if not full_tag.startswith(tag_prefix): - if verbose: - fmt = "tag '%s' doesn't start with prefix '%s'" - print(fmt % (full_tag, tag_prefix)) - pieces["error"] = ("tag '%s' doesn't start with prefix '%s'" - % (full_tag, tag_prefix)) - return pieces - pieces["closest-tag"] = full_tag[len(tag_prefix):] - - # distance: number of commits since tag - pieces["distance"] = int(mo.group(2)) - - # commit: short hex revision ID - pieces["short"] = mo.group(3) - - else: - # HEX: no tags - pieces["closest-tag"] = None - count_out, rc = run_command(GITS, ["rev-list", "HEAD", "--count"], - cwd=root) - pieces["distance"] = int(count_out) # total number of commits - - # commit date: see ISO-8601 comment in git_versions_from_keywords() - date = run_command(GITS, ["show", "-s", "--format=%ci", "HEAD"], - cwd=root)[0].strip() - pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1) - - return pieces - - -def do_vcs_install(manifest_in, versionfile_source, ipy): - """Git-specific installation logic for Versioneer. - - For Git, this means creating/changing .gitattributes to mark _version.py - for export-subst keyword substitution. 
- """ - GITS = ["git"] - if sys.platform == "win32": - GITS = ["git.cmd", "git.exe"] - files = [manifest_in, versionfile_source] - if ipy: - files.append(ipy) - try: - me = __file__ - if me.endswith(".pyc") or me.endswith(".pyo"): - me = os.path.splitext(me)[0] + ".py" - versioneer_file = os.path.relpath(me) - except NameError: - versioneer_file = "versioneer.py" - files.append(versioneer_file) - present = False - try: - f = open(".gitattributes", "r") - for line in f.readlines(): - if line.strip().startswith(versionfile_source): - if "export-subst" in line.strip().split()[1:]: - present = True - f.close() - except EnvironmentError: - pass - if not present: - f = open(".gitattributes", "a+") - f.write("%s export-subst\n" % versionfile_source) - f.close() - files.append(".gitattributes") - run_command(GITS, ["add", "--"] + files) - - -def versions_from_parentdir(parentdir_prefix, root, verbose): - """Try to determine the version from the parent directory name. - - Source tarballs conventionally unpack into a directory that includes both - the project name and a version string. We will also support searching up - two directory levels for an appropriately named parent directory - """ - rootdirs = [] - - for i in range(3): - dirname = os.path.basename(root) - if dirname.startswith(parentdir_prefix): - return {"version": dirname[len(parentdir_prefix):], - "full-revisionid": None, - "dirty": False, "error": None, "date": None} - else: - rootdirs.append(root) - root = os.path.dirname(root) # up a level - - if verbose: - print("Tried directories %s but none started with prefix %s" % - (str(rootdirs), parentdir_prefix)) - raise NotThisMethod("rootdir doesn't start with parentdir_prefix") - - -SHORT_VERSION_PY = """ -# This file was generated by 'versioneer.py' (0.18) from -# revision-control system data, or from the parent directory name of an -# unpacked source archive. Distribution tarballs contain a pre-generated copy -# of this file. 
- -import json - -version_json = ''' -%s -''' # END VERSION_JSON - - -def get_versions(): - return json.loads(version_json) -""" - - -def versions_from_file(filename): - """Try to determine the version from _version.py if present.""" - try: - with open(filename) as f: - contents = f.read() - except EnvironmentError: - raise NotThisMethod("unable to read _version.py") - mo = re.search(r"version_json = '''\n(.*)''' # END VERSION_JSON", - contents, re.M | re.S) - if not mo: - mo = re.search(r"version_json = '''\r\n(.*)''' # END VERSION_JSON", - contents, re.M | re.S) - if not mo: - raise NotThisMethod("no version_json in _version.py") - return json.loads(mo.group(1)) - - -def write_to_version_file(filename, versions): - """Write the given version number to the given _version.py file.""" - os.unlink(filename) - contents = json.dumps(versions, sort_keys=True, - indent=1, separators=(",", ": ")) - with open(filename, "w") as f: - f.write(SHORT_VERSION_PY % contents) - - print("set %s to '%s'" % (filename, versions["version"])) - - -def plus_or_dot(pieces): - """Return a + if we don't already have one, else return a .""" - if "+" in pieces.get("closest-tag", ""): - return "." - return "+" - - -def render_pep440(pieces): - """Build up version string, with post-release "local version identifier". - - Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you - get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty - - Exceptions: - 1: no tags. git_describe was just HEX. 
0+untagged.DISTANCE.gHEX[.dirty] - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"] or pieces["dirty"]: - rendered += plus_or_dot(pieces) - rendered += "%d.g%s" % (pieces["distance"], pieces["short"]) - if pieces["dirty"]: - rendered += ".dirty" - else: - # exception #1 - rendered = "0+untagged.%d.g%s" % (pieces["distance"], - pieces["short"]) - if pieces["dirty"]: - rendered += ".dirty" - return rendered - - -def render_pep440_pre(pieces): - """TAG[.post.devDISTANCE] -- No -dirty. - - Exceptions: - 1: no tags. 0.post.devDISTANCE - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"]: - rendered += ".post.dev%d" % pieces["distance"] - else: - # exception #1 - rendered = "0.post.dev%d" % pieces["distance"] - return rendered - - -def render_pep440_post(pieces): - """TAG[.postDISTANCE[.dev0]+gHEX] . - - The ".dev0" means dirty. Note that .dev0 sorts backwards - (a dirty tree will appear "older" than the corresponding clean one), - but you shouldn't be releasing software with -dirty anyways. - - Exceptions: - 1: no tags. 0.postDISTANCE[.dev0] - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"] or pieces["dirty"]: - rendered += ".post%d" % pieces["distance"] - if pieces["dirty"]: - rendered += ".dev0" - rendered += plus_or_dot(pieces) - rendered += "g%s" % pieces["short"] - else: - # exception #1 - rendered = "0.post%d" % pieces["distance"] - if pieces["dirty"]: - rendered += ".dev0" - rendered += "+g%s" % pieces["short"] - return rendered - - -def render_pep440_old(pieces): - """TAG[.postDISTANCE[.dev0]] . - - The ".dev0" means dirty. - - Eexceptions: - 1: no tags. 
0.postDISTANCE[.dev0] - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"] or pieces["dirty"]: - rendered += ".post%d" % pieces["distance"] - if pieces["dirty"]: - rendered += ".dev0" - else: - # exception #1 - rendered = "0.post%d" % pieces["distance"] - if pieces["dirty"]: - rendered += ".dev0" - return rendered - - -def render_git_describe(pieces): - """TAG[-DISTANCE-gHEX][-dirty]. - - Like 'git describe --tags --dirty --always'. - - Exceptions: - 1: no tags. HEX[-dirty] (note: no 'g' prefix) - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"]: - rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) - else: - # exception #1 - rendered = pieces["short"] - if pieces["dirty"]: - rendered += "-dirty" - return rendered - - -def render_git_describe_long(pieces): - """TAG-DISTANCE-gHEX[-dirty]. - - Like 'git describe --tags --dirty --always -long'. - The distance/hash is unconditional. - - Exceptions: - 1: no tags. 
HEX[-dirty] (note: no 'g' prefix) - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) - else: - # exception #1 - rendered = pieces["short"] - if pieces["dirty"]: - rendered += "-dirty" - return rendered - - -def render(pieces, style): - """Render the given version pieces into the requested style.""" - if pieces["error"]: - return {"version": "unknown", - "full-revisionid": pieces.get("long"), - "dirty": None, - "error": pieces["error"], - "date": None} - - if not style or style == "default": - style = "pep440" # the default - - if style == "pep440": - rendered = render_pep440(pieces) - elif style == "pep440-pre": - rendered = render_pep440_pre(pieces) - elif style == "pep440-post": - rendered = render_pep440_post(pieces) - elif style == "pep440-old": - rendered = render_pep440_old(pieces) - elif style == "git-describe": - rendered = render_git_describe(pieces) - elif style == "git-describe-long": - rendered = render_git_describe_long(pieces) - else: - raise ValueError("unknown style '%s'" % style) - - return {"version": rendered, "full-revisionid": pieces["long"], - "dirty": pieces["dirty"], "error": None, - "date": pieces.get("date")} - - -class VersioneerBadRootError(Exception): - """The project root directory is unknown or missing key files.""" - - -def get_versions(verbose=False): - """Get the project version from whatever source is available. - - Returns dict with two keys: 'version' and 'full'. 
- """ - if "versioneer" in sys.modules: - # see the discussion in cmdclass.py:get_cmdclass() - del sys.modules["versioneer"] - - root = get_root() - cfg = get_config_from_root(root) - - assert cfg.VCS is not None, "please set [versioneer]VCS= in setup.cfg" - handlers = HANDLERS.get(cfg.VCS) - assert handlers, "unrecognized VCS '%s'" % cfg.VCS - verbose = verbose or cfg.verbose - assert cfg.versionfile_source is not None, \ - "please set versioneer.versionfile_source" - assert cfg.tag_prefix is not None, "please set versioneer.tag_prefix" - - versionfile_abs = os.path.join(root, cfg.versionfile_source) - - # extract version from first of: _version.py, VCS command (e.g. 'git - # describe'), parentdir. This is meant to work for developers using a - # source checkout, for users of a tarball created by 'setup.py sdist', - # and for users of a tarball/zipball created by 'git archive' or github's - # download-from-tag feature or the equivalent in other VCSes. - - get_keywords_f = handlers.get("get_keywords") - from_keywords_f = handlers.get("keywords") - if get_keywords_f and from_keywords_f: - try: - keywords = get_keywords_f(versionfile_abs) - ver = from_keywords_f(keywords, cfg.tag_prefix, verbose) - if verbose: - print("got version from expanded keyword %s" % ver) - return ver - except NotThisMethod: - pass - - try: - ver = versions_from_file(versionfile_abs) - if verbose: - print("got version from file %s %s" % (versionfile_abs, ver)) - return ver - except NotThisMethod: - pass - - from_vcs_f = handlers.get("pieces_from_vcs") - if from_vcs_f: - try: - pieces = from_vcs_f(cfg.tag_prefix, root, verbose) - ver = render(pieces, cfg.style) - if verbose: - print("got version from VCS %s" % ver) - return ver - except NotThisMethod: - pass - - try: - if cfg.parentdir_prefix: - ver = versions_from_parentdir(cfg.parentdir_prefix, root, verbose) - if verbose: - print("got version from parentdir %s" % ver) - return ver - except NotThisMethod: - pass - - if verbose: - 
print("unable to compute version") - - return {"version": "0+unknown", "full-revisionid": None, - "dirty": None, "error": "unable to compute version", - "date": None} - - -def get_version(): - """Get the short version string for this project.""" - return get_versions()["version"] - - -def get_cmdclass(): - """Get the custom setuptools/distutils subclasses used by Versioneer.""" - if "versioneer" in sys.modules: - del sys.modules["versioneer"] - # this fixes the "python setup.py develop" case (also 'install' and - # 'easy_install .'), in which subdependencies of the main project are - # built (using setup.py bdist_egg) in the same python process. Assume - # a main project A and a dependency B, which use different versions - # of Versioneer. A's setup.py imports A's Versioneer, leaving it in - # sys.modules by the time B's setup.py is executed, causing B to run - # with the wrong versioneer. Setuptools wraps the sub-dep builds in a - # sandbox that restores sys.modules to it's pre-build state, so the - # parent is protected against the child's "import versioneer". By - # removing ourselves from sys.modules here, before the child build - # happens, we protect the child from the parent's versioneer too. 
- # Also see https://github.com/warner/python-versioneer/issues/52 - - cmds = {} - - # we add "version" to both distutils and setuptools - from distutils.core import Command - - class cmd_version(Command): - description = "report generated version string" - user_options = [] - boolean_options = [] - - def initialize_options(self): - pass - - def finalize_options(self): - pass - - def run(self): - vers = get_versions(verbose=True) - print("Version: %s" % vers["version"]) - print(" full-revisionid: %s" % vers.get("full-revisionid")) - print(" dirty: %s" % vers.get("dirty")) - print(" date: %s" % vers.get("date")) - if vers["error"]: - print(" error: %s" % vers["error"]) - cmds["version"] = cmd_version - - # we override "build_py" in both distutils and setuptools - # - # most invocation pathways end up running build_py: - # distutils/build -> build_py - # distutils/install -> distutils/build ->.. - # setuptools/bdist_wheel -> distutils/install ->.. - # setuptools/bdist_egg -> distutils/install_lib -> build_py - # setuptools/install -> bdist_egg ->.. - # setuptools/develop -> ? - # pip install: - # copies source tree to a tempdir before running egg_info/etc - # if .git isn't copied too, 'git describe' will fail - # then does setup.py bdist_wheel, or sometimes setup.py install - # setup.py egg_info -> ? 
- - # we override different "build_py" commands for both environments - if "setuptools" in sys.modules: - from setuptools.command.build_py import build_py as _build_py - else: - from distutils.command.build_py import build_py as _build_py - - class cmd_build_py(_build_py): - def run(self): - root = get_root() - cfg = get_config_from_root(root) - versions = get_versions() - _build_py.run(self) - # now locate _version.py in the new build/ directory and replace - # it with an updated value - if cfg.versionfile_build: - target_versionfile = os.path.join(self.build_lib, - cfg.versionfile_build) - print("UPDATING %s" % target_versionfile) - write_to_version_file(target_versionfile, versions) - cmds["build_py"] = cmd_build_py - - if "cx_Freeze" in sys.modules: # cx_freeze enabled? - from cx_Freeze.dist import build_exe as _build_exe - # nczeczulin reports that py2exe won't like the pep440-style string - # as FILEVERSION, but it can be used for PRODUCTVERSION, e.g. - # setup(console=[{ - # "version": versioneer.get_version().split("+", 1)[0], # FILEVERSION - # "product_version": versioneer.get_version(), - # ... - - class cmd_build_exe(_build_exe): - def run(self): - root = get_root() - cfg = get_config_from_root(root) - versions = get_versions() - target_versionfile = cfg.versionfile_source - print("UPDATING %s" % target_versionfile) - write_to_version_file(target_versionfile, versions) - - _build_exe.run(self) - os.unlink(target_versionfile) - with open(cfg.versionfile_source, "w") as f: - LONG = LONG_VERSION_PY[cfg.VCS] - f.write(LONG % - {"DOLLAR": "$", - "STYLE": cfg.style, - "TAG_PREFIX": cfg.tag_prefix, - "PARENTDIR_PREFIX": cfg.parentdir_prefix, - "VERSIONFILE_SOURCE": cfg.versionfile_source, - }) - cmds["build_exe"] = cmd_build_exe - del cmds["build_py"] - - if 'py2exe' in sys.modules: # py2exe enabled? 
- try: - from py2exe.distutils_buildexe import py2exe as _py2exe # py3 - except ImportError: - from py2exe.build_exe import py2exe as _py2exe # py2 - - class cmd_py2exe(_py2exe): - def run(self): - root = get_root() - cfg = get_config_from_root(root) - versions = get_versions() - target_versionfile = cfg.versionfile_source - print("UPDATING %s" % target_versionfile) - write_to_version_file(target_versionfile, versions) - - _py2exe.run(self) - os.unlink(target_versionfile) - with open(cfg.versionfile_source, "w") as f: - LONG = LONG_VERSION_PY[cfg.VCS] - f.write(LONG % - {"DOLLAR": "$", - "STYLE": cfg.style, - "TAG_PREFIX": cfg.tag_prefix, - "PARENTDIR_PREFIX": cfg.parentdir_prefix, - "VERSIONFILE_SOURCE": cfg.versionfile_source, - }) - cmds["py2exe"] = cmd_py2exe - - # we override different "sdist" commands for both environments - if "setuptools" in sys.modules: - from setuptools.command.sdist import sdist as _sdist - else: - from distutils.command.sdist import sdist as _sdist - - class cmd_sdist(_sdist): - def run(self): - versions = get_versions() - self._versioneer_generated_versions = versions - # unless we update this, the command will keep using the old - # version - self.distribution.metadata.version = versions["version"] - return _sdist.run(self) - - def make_release_tree(self, base_dir, files): - root = get_root() - cfg = get_config_from_root(root) - _sdist.make_release_tree(self, base_dir, files) - # now locate _version.py in the new base_dir directory - # (remembering that it may be a hardlink) and replace it with an - # updated value - target_versionfile = os.path.join(base_dir, cfg.versionfile_source) - print("UPDATING %s" % target_versionfile) - write_to_version_file(target_versionfile, - self._versioneer_generated_versions) - cmds["sdist"] = cmd_sdist - - return cmds - - -CONFIG_ERROR = """ -setup.cfg is missing the necessary Versioneer configuration. 
You need -a section like: - - [versioneer] - VCS = git - style = pep440 - versionfile_source = src/myproject/_version.py - versionfile_build = myproject/_version.py - tag_prefix = - parentdir_prefix = myproject- - -You will also need to edit your setup.py to use the results: - - import versioneer - setup(version=versioneer.get_version(), - cmdclass=versioneer.get_cmdclass(), ...) - -Please read the docstring in ./versioneer.py for configuration instructions, -edit setup.cfg, and re-run the installer or 'python versioneer.py setup'. -""" - -SAMPLE_CONFIG = """ -# See the docstring in versioneer.py for instructions. Note that you must -# re-run 'versioneer.py setup' after changing this section, and commit the -# resulting files. - -[versioneer] -#VCS = git -#style = pep440 -#versionfile_source = -#versionfile_build = -#tag_prefix = -#parentdir_prefix = - -""" - -INIT_PY_SNIPPET = """ -from ._version import get_versions -__version__ = get_versions()['version'] -del get_versions -""" - - -def do_setup(): - """Main VCS-independent setup function for installing Versioneer.""" - root = get_root() - try: - cfg = get_config_from_root(root) - except (EnvironmentError, configparser.NoSectionError, - configparser.NoOptionError) as e: - if isinstance(e, (EnvironmentError, configparser.NoSectionError)): - print("Adding sample versioneer config to setup.cfg", - file=sys.stderr) - with open(os.path.join(root, "setup.cfg"), "a") as f: - f.write(SAMPLE_CONFIG) - print(CONFIG_ERROR, file=sys.stderr) - return 1 - - print(" creating %s" % cfg.versionfile_source) - with open(cfg.versionfile_source, "w") as f: - LONG = LONG_VERSION_PY[cfg.VCS] - f.write(LONG % {"DOLLAR": "$", - "STYLE": cfg.style, - "TAG_PREFIX": cfg.tag_prefix, - "PARENTDIR_PREFIX": cfg.parentdir_prefix, - "VERSIONFILE_SOURCE": cfg.versionfile_source, - }) - - ipy = os.path.join(os.path.dirname(cfg.versionfile_source), - "__init__.py") - if os.path.exists(ipy): - try: - with open(ipy, "r") as f: - old = f.read() - 
except EnvironmentError: - old = "" - if INIT_PY_SNIPPET not in old: - print(" appending to %s" % ipy) - with open(ipy, "a") as f: - f.write(INIT_PY_SNIPPET) - else: - print(" %s unmodified" % ipy) - else: - print(" %s doesn't exist, ok" % ipy) - ipy = None - - # Make sure both the top-level "versioneer.py" and versionfile_source - # (PKG/_version.py, used by runtime code) are in MANIFEST.in, so - # they'll be copied into source distributions. Pip won't be able to - # install the package without this. - manifest_in = os.path.join(root, "MANIFEST.in") - simple_includes = set() - try: - with open(manifest_in, "r") as f: - for line in f: - if line.startswith("include "): - for include in line.split()[1:]: - simple_includes.add(include) - except EnvironmentError: - pass - # That doesn't cover everything MANIFEST.in can do - # (http://docs.python.org/2/distutils/sourcedist.html#commands), so - # it might give some false negatives. Appending redundant 'include' - # lines is safe, though. - if "versioneer.py" not in simple_includes: - print(" appending 'versioneer.py' to MANIFEST.in") - with open(manifest_in, "a") as f: - f.write("include versioneer.py\n") - else: - print(" 'versioneer.py' already in MANIFEST.in") - if cfg.versionfile_source not in simple_includes: - print(" appending versionfile_source ('%s') to MANIFEST.in" % - cfg.versionfile_source) - with open(manifest_in, "a") as f: - f.write("include %s\n" % cfg.versionfile_source) - else: - print(" versionfile_source already in MANIFEST.in") - - # Make VCS-specific changes. For git, this means creating/changing - # .gitattributes to mark _version.py for export-subst keyword - # substitution. 
- do_vcs_install(manifest_in, cfg.versionfile_source, ipy) - return 0 - - -def scan_setup_py(): - """Validate the contents of setup.py against Versioneer's expectations.""" - found = set() - setters = False - errors = 0 - with open("setup.py", "r") as f: - for line in f.readlines(): - if "import versioneer" in line: - found.add("import") - if "versioneer.get_cmdclass()" in line: - found.add("cmdclass") - if "versioneer.get_version()" in line: - found.add("get_version") - if "versioneer.VCS" in line: - setters = True - if "versioneer.versionfile_source" in line: - setters = True - if len(found) != 3: - print("") - print("Your setup.py appears to be missing some important items") - print("(but I might be wrong). Please make sure it has something") - print("roughly like the following:") - print("") - print(" import versioneer") - print(" setup( version=versioneer.get_version(),") - print(" cmdclass=versioneer.get_cmdclass(), ...)") - print("") - errors += 1 - if setters: - print("You should remove lines like 'versioneer.VCS = ' and") - print("'versioneer.versionfile_source = ' . This configuration") - print("now lives in setup.cfg, and should be removed from setup.py") - print("") - errors += 1 - return errors - - -if __name__ == "__main__": - cmd = sys.argv[1] - if cmd == "setup": - errors = do_setup() - errors += scan_setup_py() - if errors: - sys.exit(1)