Commit

Merge remote-tracking branch 'trunk/master'
gmatteo committed Apr 15, 2024
2 parents bfb30a6 + 0e57abf commit 82946c4
Showing 462 changed files with 31,615 additions and 21,676 deletions.
2 changes: 2 additions & 0 deletions .github/CODEOWNERS
@@ -2,3 +2,5 @@
pymatgen/io/ase.py @Andrew-S-Rosen
pymatgen/io/abinit/* @gmatteo
pymatgen/io/lobster/* @JaGeo
pymatgen/ext/* @ml-evs
tests/ext/* @ml-evs
2 changes: 2 additions & 0 deletions .github/release.yml
@@ -14,6 +14,8 @@ changelog:
labels: [housekeeping]
- title: 🚀 Performance
labels: [performance]
- title: 🚧 CI
labels: [ci]
- title: 💡 Refactoring
labels: [refactor]
- title: 🧪 Tests
42 changes: 42 additions & 0 deletions .github/workflows/issue-metrics.yml
@@ -0,0 +1,42 @@
name: Monthly issue metrics
on:
workflow_dispatch:
schedule:
- cron: '3 2 1 * *'

permissions:
contents: read

jobs:
build:
name: issue metrics
runs-on: ubuntu-latest
permissions:
issues: write
pull-requests: read
steps:
- name: Get dates for last month
shell: bash
run: |
# Calculate the first day of the previous month
first_day=$(date -d "last month" +%Y-%m-01)
# Calculate the last day of the previous month
last_day=$(date -d "$first_day +1 month -1 day" +%Y-%m-%d)
#Set an environment variable with the date range
echo "$first_day..$last_day"
echo "last_month=$first_day..$last_day" >> "$GITHUB_ENV"
- name: Run issue-metrics tool
uses: github/issue-metrics@v3
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
SEARCH_QUERY: 'repo:materialsproject/pymatgen is:issue created:${{ env.last_month }} -reason:"not planned"'

- name: Create issue
uses: peter-evans/create-issue-from-file@v5
with:
title: Monthly issue metrics report
token: ${{ secrets.GITHUB_TOKEN }}
content-filepath: ./issue_metrics.md
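
The bash step above builds a first_day..last_day range covering the previous month and exports it as last_month for the SEARCH_QUERY. The same range can be reproduced with a short standalone Python sketch; the run date below is hypothetical, purely for illustration:

from datetime import date, timedelta

run_date = date(2024, 4, 15)  # hypothetical run date; the workflow effectively uses "today"
last_day = run_date.replace(day=1) - timedelta(days=1)  # last day of the previous month
first_day = last_day.replace(day=1)                     # first day of the previous month
print(f"{first_day:%Y-%m-%d}..{last_day:%Y-%m-%d}")     # -> 2024-03-01..2024-03-31
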
15 changes: 7 additions & 8 deletions .github/workflows/jekyll-gh-pages.yml
@@ -1,15 +1,11 @@
# Sample workflow for building and deploying a Jekyll site to GitHub Pages
name: Deploy Jekyll with GitHub Pages dependencies preinstalled

on:
# Runs on pushes targeting the default branch
push:
branches: ["master"]
workflow_dispatch: # enable manual workflow execution

# Allows you to run this workflow manually from the Actions tab
workflow_dispatch:

# Sets permissions of the GITHUB_TOKEN to allow deployment to GitHub Pages
# Set permissions of the GITHUB_TOKEN to allow deployment to GitHub Pages
permissions:
contents: read
pages: write
@@ -22,23 +18,26 @@ concurrency:
cancel-in-progress: false

jobs:
# Build job
build:
# prevent this action from running on forks
if: github.repository == 'materialsproject/pymatgen'
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v4

- name: Setup Pages
uses: actions/configure-pages@v3

- name: Build with Jekyll
uses: actions/jekyll-build-pages@v1
with:
source: ./docs
destination: ./_site

- name: Upload artifact
uses: actions/upload-pages-artifact@v2

# Deployment job
deploy:
environment:
name: github-pages
2 changes: 1 addition & 1 deletion .github/workflows/lint.yml
@@ -27,7 +27,7 @@ jobs:
- name: ruff
run: |
ruff --version
ruff .
ruff check .
ruff format --check .
- name: mypy
65 changes: 22 additions & 43 deletions .github/workflows/test.yml
@@ -19,6 +19,9 @@ jobs:
test:
# prevent this action from running on forks
if: github.repository == 'materialsproject/pymatgen'
defaults:
run:
shell: bash -l {0} # enables conda/mamba env activation by reading bash profile
strategy:
fail-fast: false
matrix:
@@ -48,65 +51,41 @@
- name: Check out repo
uses: actions/checkout@v4

- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v5
with:
python-version: ${{ matrix.python-version }}
cache: pip
cache-dependency-path: setup.py
- name: Set up micromamba
uses: mamba-org/setup-micromamba@main

- name: Create mamba environment
run: |
micromamba create -n pmg python=${{ matrix.python-version }} --yes
- name: Install uv
run: pip install uv
run: micromamba run -n pmg pip install uv

- name: Copy GULP to bin
if: matrix.os == 'ubuntu-latest'
run: |
sudo cp cmd_line/gulp/Linux_64bit/* /usr/local/bin/
- name: Install Bader
- name: Install ubuntu-only conda dependencies
if: matrix.os == 'ubuntu-latest'
run: |
wget https://theory.cm.utexas.edu/henkelman/code/bader/download/bader_lnx_64.tar.gz
tar xvzf bader_lnx_64.tar.gz
sudo mv bader /usr/local/bin/
continue-on-error: true # This is not critical to succeed.
micromamba install -n pmg -c conda-forge enumlib packmol bader openbabel openff-toolkit --yes
- name: Install Enumlib
if: matrix.os == 'ubuntu-latest'
- name: Install pymatgen and dependencies
run: |
git clone --recursive https://github.com/msg-byu/enumlib.git
cd enumlib/symlib/src
export F90=gfortran
make
cd ../../src
make enum.x
sudo mv enum.x /usr/local/bin/
cd ..
sudo cp aux_src/makeStr.py /usr/local/bin/
continue-on-error: true # This is not critical to succeed.

- name: Install Packmol
if: matrix.os == 'ubuntu-latest'
run: |
wget -O packmol.tar.gz https://github.com/m3g/packmol/archive/refs/tags/v20.14.2.tar.gz
tar xvzf packmol.tar.gz
export F90=gfortran
cd packmol-20.14.2
./configure
make
sudo mv packmol /usr/local/bin/
cd ..
continue-on-error: true # This is not critical to succeed.

- name: Install dependencies
run: |
uv pip install numpy cython --system
micromamba activate pmg
# TODO remove temporary fix. added since uv install torch is flaky.
# track https://github.com/astral-sh/uv/issues/1921 for resolution
pip install torch
uv pip install numpy cython
uv pip install -e '.[dev,optional]' --system
uv pip install --editable '.[dev,optional]'
# TODO remove next line installing ase from main branch when FrechetCellFilter is released
uv pip install --upgrade 'ase@git+https://gitlab.com/ase/ase' --system
uv pip install --upgrade 'git+https://gitlab.com/ase/ase'
- name: pytest split ${{ matrix.split }}
run: |
micromamba activate pmg
pytest --splits 10 --group ${{ matrix.split }} --durations-path tests/files/.pytest-split-durations tests
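
The pytest step above relies on pytest-split to spread the suite across 10 groups using the recorded durations file (tests/files/.pytest-split-durations). As a rough sketch of the idea behind duration-based splitting, assuming a simple greedy least-loaded assignment rather than pytest-split's actual algorithm:

from collections import defaultdict

def split_tests(durations: dict[str, float], n_groups: int) -> dict[int, list[str]]:
    # assign the slowest tests first, always to the currently lightest group
    groups: dict[int, list[str]] = defaultdict(list)
    load = [0.0] * n_groups
    for test, duration in sorted(durations.items(), key=lambda kv: kv[1], reverse=True):
        idx = load.index(min(load))
        groups[idx].append(test)
        load[idx] += duration
    return dict(groups)

print(split_tests({"test_a": 3.0, "test_b": 2.0, "test_c": 1.5, "test_d": 0.5}, 2))
# -> {0: ['test_a', 'test_d'], 1: ['test_b', 'test_c']}
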
4 changes: 2 additions & 2 deletions .pre-commit-config.yaml
@@ -8,14 +8,14 @@ ci:

repos:
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.3.3
rev: v0.3.7
hooks:
- id: ruff
args: [--fix, --unsafe-fixes]
- id: ruff-format

- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v4.5.0
rev: v4.6.0
hooks:
- id: check-yaml
- id: end-of-file-fixer
8 changes: 6 additions & 2 deletions dev_scripts/chemenv/equivalent_indices.py
@@ -99,9 +99,9 @@
# 0. any point
for i0 in range(8):
# 1. point opposite to point 0. in the square face
if i0 in [0, 2]:
if i0 in {0, 2}:
i1 = i0 + 1
elif i0 in [1, 3]:
elif i0 in {1, 3}:
i1 = i0 - 1
elif i0 == 4:
i1 = 7
@@ -111,10 +111,14 @@
i1 = 5
elif i0 == 7:
i1 = 4
else:
raise RuntimeError("Cannot determine point.")

# 2. one of the two last points in the square face
sfleft = list(sf1) if i0 in sf1 else list(sf2)
sfleft.remove(i0)
sfleft.remove(i1)
i2 = 0
for i2 in sfleft:
sfleft2 = list(sfleft)
sfleft2.remove(i2)
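
The membership-test changes above (in [0, 2] becoming in {0, 2}) swap list literals for set literals. For literals this small the effect is mostly stylistic (CPython can fold a constant set literal used in a membership test into a frozenset constant), but the two idioms can be compared with a quick, illustrative micro-benchmark; timings are machine-dependent:

import timeit

list_time = timeit.timeit("2 in [0, 2, 4, 6]", number=1_000_000)
set_time = timeit.timeit("2 in {0, 2, 4, 6}", number=1_000_000)
print(f"list literal: {list_time:.3f}s  set literal: {set_time:.3f}s")
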
2 changes: 1 addition & 1 deletion dev_scripts/chemenv/get_plane_permutations_optimized.py
@@ -279,7 +279,7 @@ def random_permutations_iterator(initial_permutation, n_permutations):
f"Get the explicit optimized permutations for geometry {cg.name!r} (symbol : "
f'{cg_symbol!r}) ? ("y" to confirm, "q" to quit)\n'
)
if test not in ["y", "q"]:
if test not in ("y", "q"):
print("Wrong key, try again")
continue
if test == "y":
@@ -151,17 +151,18 @@ def get_structure(self, morphing_factor):

coords = copy.deepcopy(self.abstract_geometry.points_wcs_ctwcc())
bare_points = self.abstract_geometry.bare_points_with_centre
origin = None

for morphing in self.morphing_description:
if morphing["site_type"] == "neighbor":
i_site = morphing["ineighbor"] + 1
if morphing["expansion_origin"] == "central_site":
origin = bare_points[0]
vector = bare_points[i_site] - origin
coords[i_site] += vector * (morphing_factor - 1.0)
else:
if morphing["site_type"] != "neighbor":
raise ValueError(f"Key \"site_type\" is {morphing['site_type']} while it can only be neighbor")

i_site = morphing["ineighbor"] + 1
if morphing["expansion_origin"] == "central_site":
origin = bare_points[0]
vector = bare_points[i_site] - origin
coords[i_site] += vector * (morphing_factor - 1.0)

return Structure(lattice=lattice, species=species, coords=coords, coords_are_cartesian=True)

def estimate_parameters(self, dist_factor_min, dist_factor_max, symmetry_measure_type="csm_wcs_ctwcc"):
@@ -269,7 +270,7 @@ def get_weights(self, weights_options):
"+-------------------------------------------------------------+\n"
)

with open("ce_pairs.json") as file:
with open("ce_pairs.json", encoding="utf-8") as file:
ce_pairs = json.load(file)
self_weight_max_csms: dict[str, list[float]] = {}
self_weight_max_csms_per_cn: dict[str, list[float]] = {}
1 change: 1 addition & 0 deletions dev_scripts/chemenv/view_environment.py
@@ -52,6 +52,7 @@
print()
# Visualize the separation plane of a given algorithm
sep_plane = False
algo = None
if any(algo.algorithm_type == SEPARATION_PLANE for algo in cg.algorithms):
test = input("Enter index of the algorithm for which you want to visualize the plane : ")
if test != "":
26 changes: 13 additions & 13 deletions dev_scripts/potcar_scrambler.py
@@ -4,6 +4,7 @@
import shutil
import warnings
from glob import glob
from typing import TYPE_CHECKING

import numpy as np
from monty.os.path import zpath
@@ -14,6 +15,9 @@
from pymatgen.io.vasp.sets import _load_yaml_config
from pymatgen.util.testing import VASP_IN_DIR

if TYPE_CHECKING:
from typing_extensions import Self


class PotcarScrambler:
"""
@@ -34,26 +38,22 @@ class PotcarScrambler:
from existing POTCAR `input_filename`
"""

def __init__(self, potcars: Potcar | PotcarSingle):
if isinstance(potcars, PotcarSingle):
self.PSP_list = [potcars]
else:
self.PSP_list = potcars
def __init__(self, potcars: Potcar | PotcarSingle) -> None:
self.PSP_list = [potcars] if isinstance(potcars, PotcarSingle) else potcars
self.scrambled_potcars_str = ""
for psp in self.PSP_list:
scrambled_potcar_str = self.scramble_single_potcar(psp)
self.scrambled_potcars_str += scrambled_potcar_str
return

def _rand_float_from_str_with_prec(self, input_str: str, bloat: float = 1.5):
def _rand_float_from_str_with_prec(self, input_str: str, bloat: float = 1.5) -> float:
n_prec = len(input_str.split(".")[1])
bd = max(1, bloat * abs(float(input_str)))
return round(bd * np.random.rand(1)[0], n_prec)

def _read_fortran_str_and_scramble(self, input_str: str, bloat: float = 1.5):
input_str = input_str.strip()

if input_str.lower() in ("t", "f", "true", "false"):
if input_str.lower() in {"t", "f", "true", "false"}:
return bool(np.random.randint(2))

if input_str.upper() == input_str.lower() and input_str[0].isnumeric():
@@ -68,7 +68,7 @@ def _read_fortran_str_and_scramble(self, input_str: str, bloat: float = 1.5):
except ValueError:
return input_str

def scramble_single_potcar(self, potcar: PotcarSingle):
def scramble_single_potcar(self, potcar: PotcarSingle) -> str:
"""
Scramble the body of a POTCAR, retain the PSCTR header information.
@@ -124,20 +124,20 @@ def scramble_single_potcar(self, potcar: PotcarSingle):
)
return scrambled_potcar_str

def to_file(self, filename: str):
def to_file(self, filename: str) -> None:
with zopen(filename, mode="wt") as file:
file.write(self.scrambled_potcars_str)

@classmethod
def from_file(cls, input_filename: str, output_filename: str | None = None):
def from_file(cls, input_filename: str, output_filename: str | None = None) -> Self:
psp = Potcar.from_file(input_filename)
psp_scrambled = cls(psp)
if output_filename:
psp_scrambled.to_file(output_filename)
return psp_scrambled


def generate_fake_potcar_libraries():
def generate_fake_potcar_libraries() -> None:
"""
To test the `_gen_potcar_summary_stats` function in `pymatgen.io.vasp.inputs`,
need a library of fake POTCARs which do not violate copyright
@@ -173,7 +173,7 @@ def generate_fake_potcar_libraries():
break


def potcar_cleanser():
def potcar_cleanser() -> None:
"""
Function to replace copyrighted POTCARs used in io.vasp.sets testing
with dummy POTCARs that have scrambled PSP and kinetic energy values
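
The _rand_float_from_str_with_prec helper shown above keeps the decimal precision of the original POTCAR value while randomising its magnitude. A standalone copy (renamed here purely for illustration) that can be run outside the class to see the behaviour:

import numpy as np

def rand_float_from_str_with_prec(input_str: str, bloat: float = 1.5) -> float:
    # keep the number of decimal places of the original value, randomise its magnitude
    n_prec = len(input_str.split(".")[1])
    upper = max(1, bloat * abs(float(input_str)))
    return round(upper * np.random.rand(1)[0], n_prec)

print(rand_float_from_str_with_prec("12.3456"))  # random value below 18.5184, rounded to 4 decimal places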