Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
21 changes: 11 additions & 10 deletions .github/workflows/build_dist.yml
Original file line number Diff line number Diff line change
Expand Up @@ -26,13 +26,16 @@ jobs:
with:
python-version: "3.10"

- name: Install uv
uses: astral-sh/setup-uv@v6
with:
version: "0.9.2"
python-version: "3.10"
enable-cache: true
- name: Install dependencies
run: |
python -m venv ./venv
. ./venv/bin/activate
python -m pip install --upgrade pip
pip install poetry
poetry install --with=dev --no-interaction --no-ansi
uv venv .venv
uv sync --locked --dev --extra cpu

- name: Gather new package version
id: version
Expand All @@ -45,14 +48,12 @@ jobs:

- name: Bump package version in pyproject.toml
run: |
. ./venv/bin/activate
poetry version ${{steps.version.outputs.new_tag}}
uv version ${{steps.version.outputs.new_tag}}

- name: Build dist
run: |
. ./venv/bin/activate
python -m build --sdist --wheel --no-isolation --outdir dist/ .
twine check dist/*
uv run python -m build --sdist --wheel --no-isolation --outdir dist/ .
uv run twine check dist/*

- name: Commit updated pyproject.toml
run: |
Expand Down
25 changes: 13 additions & 12 deletions .github/workflows/docs.yml
Original file line number Diff line number Diff line change
Expand Up @@ -17,21 +17,23 @@ jobs:
with:
python-version: "3.10"

- name: Install uv
uses: astral-sh/setup-uv@v6
with:
version: "0.9.2"
python-version: "3.10"
enable-cache: true
- name: Install dependencies
run: |
python -m venv ./venv
. ./venv/bin/activate
python -m pip install --upgrade pip
pip install poetry
poetry install --with=docs --no-interaction --no-ansi
uv venv .venv
uv sync --locked --dev --group docs --extra cpu

- name: Build the sphinx docs
run: |
. ./venv/bin/activate
sphinx-apidoc -f -o ./sphinx/source ./src/bolightningpipeline
make -C sphinx clean
python sphinx/clean_html_files.py
make -C sphinx html
uv run sphinx-apidoc -f -o ./sphinx/source ./src/bolightningpipeline
uv run make -C sphinx clean
uv run python sphinx/clean_html_files.py
uv run make -C sphinx html
touch sphinx/build/html/.nojekyll

- name: Checkout gh-pages branch
Expand All @@ -41,9 +43,8 @@ jobs:

- name: Copy build files to docs folder
run: |
. ./venv/bin/activate
cp -a sphinx/build/html/. docs/
python sphinx/make_html_files_list.py
uv run python sphinx/make_html_files_list.py
rm -rf sphinx/build

- name: Commit to gh-pages branch
Expand Down
48 changes: 24 additions & 24 deletions .github/workflows/tests.yml
Original file line number Diff line number Diff line change
Expand Up @@ -27,31 +27,30 @@ jobs:
uses: actions/setup-python@v3
with:
python-version: ${{ matrix.python-version }}
- name: Install uv
uses: astral-sh/setup-uv@v6
with:
version: "0.9.2"
python-version: ${{ matrix.python-version }}
enable-cache: true
- name: Install dependencies
run: |
python -m venv ./venv
. ./venv/bin/activate
python -m pip install --upgrade pip
pip install poetry
poetry install --with=dev --no-interaction --no-ansi
uv venv .venv
uv sync --locked --dev --extra cpu
- name: Test Linting
run: |
. ./venv/bin/activate
black src tests --check --diff
isort src tests --check-only --diff
uv run black src tests --check --diff
uv run isort src tests --check-only --diff
- name: Test Typing
run: |
. ./venv/bin/activate
mypy src tests
uv run mypy src tests
- name: Test Notebooks
run: |
. ./venv/bin/activate
pytest --nbmake notebooks -n=auto --nbmake-kernel=python3 --nbmake-timeout=600 # 10 minutes timeout
uv run pytest --nbmake notebooks -n=auto --nbmake-kernel=python3 --nbmake-timeout=600 # 10 minutes timeout

- name: Test Unittests with pytest
run: |
. ./venv/bin/activate
pytest tests -n=auto --cov=src --cov-report="xml:tests/.tmp/coverage.xml" --cov-report=term-missing --durations=10
uv run pytest tests -n=auto --cov=src --cov-report="xml:tests/.tmp/coverage.xml" --cov-report=term-missing --durations=10

- name: Code Coverage
uses: orgoro/coverage@v3.2
Expand All @@ -65,9 +64,8 @@ jobs:
- name: Test Build
if: ${{ matrix.python-version == '3.10' }}
run: |
. ./venv/bin/activate
python -m build --sdist --wheel --no-isolation --outdir dist/ .
twine check dist/*
uv run python -m build --sdist --wheel --no-isolation --outdir dist/ .
uv run twine check dist/*

Run-tests-on-Windows:
name: Run tests on Windows-latest
Expand All @@ -82,14 +80,16 @@ jobs:
uses: actions/setup-python@v3
with:
python-version: ${{ matrix.python-version }}
- name: Install uv
uses: astral-sh/setup-uv@v6
with:
version: "0.9.2"
python-version: ${{ matrix.python-version }}
enable-cache: true
- name: Install dependencies
run: |
python -m venv ./venv
. ./venv/Scripts/activate
python -m pip install --upgrade pip
pip install poetry
poetry install --with=dev --no-interaction --no-ansi
uv venv .venv
uv sync --locked --dev --extra cpu
- name: Test Unittests with pytest
run: |
. ./venv/Scripts/activate
pytest tests -n=auto
uv run pytest tests -n=auto
2 changes: 2 additions & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -303,3 +303,5 @@ tmp_report_dir
.tmp_report.json
coverage.json
/notebooks/Digits2D
/notebooks/Cifar10
/notebooks/data
25 changes: 14 additions & 11 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -30,29 +30,32 @@ With `python` and `pip` installed,
pip install git+https://github.com/MatchCake/MatchCake-Opt
```

With `poetry` installed,
With `uv` installed,
```bash
poetry add "git+https://github.com/MatchCake/MatchCake-Opt"
uv add "git+https://github.com/MatchCake/MatchCake-Opt"
```

To install the package with cu128 (CUDA), add `--extra cu128` to the installation commands above.


### For developers

With `python` and `pip` installed, run the following commands to install the dependencies:
```bash
python -m venv .venv
source .venv/bin/activate
pip install poetry
python -m poetry install
pip install uv
uv sync --dev
```

With `poetry` installed, run the following commands to install the dependencies:
With `uv` installed, run the following commands to install the dependencies:
```bash
python -m venv .venv
source .venv/bin/activate
poetry install
uv venv .venv
uv sync --dev
```

If you'd like to contribute to this repository, please do so by submitting pull requests to the `dev` branch. Thank you!

## Quick Usage Example

```python
Expand All @@ -78,7 +81,7 @@ class LinearNN(ClassificationModel):
RangeParameterConfig(
name="n_neurons",
parameter_type="int",
bounds=(4, 16),
bounds=(4, 2048),
),
]

Expand All @@ -92,7 +95,7 @@ class LinearNN(ClassificationModel):
):
super().__init__(input_shape=input_shape, output_shape=output_shape, learning_rate=learning_rate, **kwargs)
self.save_hyperparameters("learning_rate", "n_neurons")
self.nn = torch.Sequential(
self.nn = torch.nn.Sequential(
torch.nn.Flatten(),
torch.nn.LazyLinear(n_neurons),
torch.nn.ReLU(),
Expand All @@ -106,7 +109,7 @@ class LinearNN(ClassificationModel):
def output_size(self):
return int(np.prod(self.output_shape))

datamodule = DataModule.from_dataset_name("Digits2D")
datamodule = DataModule.from_dataset_name("Digits2D", fold_id=0)
automl_pipeline = AutoMLPipeline(model_cls=LinearNN, datamodule=datamodule)
automl_pipeline.run()
lt_pipeline, metrics = automl_pipeline.run_best_pipeline()
Expand Down
66 changes: 42 additions & 24 deletions notebooks/automl_pipeline_tutorial.ipynb
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,8 @@
"from pathlib import Path\n",
"from typing import Optional\n",
"import json\n",
"import warnings\n",
"warnings.filterwarnings(\"ignore\")\n",
"\n",
"import numpy as np\n",
"import pennylane as qml\n",
Expand Down Expand Up @@ -53,6 +55,18 @@
"cell_type": "code",
"source": [
"class NIFCNN(ClassificationModel):\n",
" \"\"\"\n",
" Non-Interacting fermions classifier through CNN hamiltonian encoding. The flow of information\n",
" in the model goes as follows.\n",
"\n",
" 1. CNN(X_{bchw}) -> X_{be}: Build the embeddings with the CNN encoder\n",
" 2. Linear(X_{be}) -> W_{bn}: Build the local fields weights from the embeddings\n",
" 3. Linear(X_{be}) -> W_{bt}: Build the ZZ couplings weights from the embeddings\n",
" - We now have the hamiltonian: H(X_{be}) = W_{bn} Z_{n} + W_{bt} ZZ_{t}\n",
" 4. P_{knu}, P_{ktv}: We compute the probabilities to get the eigenstates of Z and ZZ\n",
" 5. E_{k} = W_{bn} P_{knu} Lambda_{u} + W_{bt} P_{ktv} Lambda_{v}: We now compute the expected values with Lambda_{u} and Lambda_{v} the eigenvalues of Z and ZZ respectively.\n",
" 6. Softmax(E_{k}) -> Y_{k}: Finally, we apply a softmax to get the probabilities of each class.\n",
" \"\"\"\n",
" MODEL_NAME = \"NIFCNN\"\n",
" DEFAULT_N_QUBITS = 16\n",
" DEFAULT_LEARNING_RATE = 2e-4\n",
Expand All @@ -68,7 +82,7 @@
" RangeParameterConfig(\n",
" name=\"n_qubits\",\n",
" parameter_type=\"int\",\n",
" bounds=(4, 16),\n",
" bounds=(4, 92),\n",
" ),\n",
" ChoiceParameterConfig(\n",
" name=\"encoder_output_activation\",\n",
Expand Down Expand Up @@ -118,21 +132,13 @@
" torch.nn.LazyLinear(self._hamiltonian_n_params),\n",
" getattr(torch.nn, encoder_output_activation)()\n",
" )\n",
" self.xx_body_couplings_head = torch.nn.Sequential(\n",
" torch.nn.Flatten(),\n",
" torch.nn.LazyLinear(self._hamiltonian_n_params),\n",
" getattr(torch.nn, encoder_output_activation)()\n",
" )\n",
"\n",
" self.zz_body_coupling_weights = torch.nn.Parameter(torch.randn(self._hamiltonian_n_params), requires_grad=True)\n",
" self.xx_body_coupling_weights = torch.nn.Parameter(torch.randn(self._hamiltonian_n_params), requires_grad=True)\n",
"\n",
" self.local_fields_op_eigvals = torch.nn.Parameter(torch.from_numpy(np.array([1.0, -1.0])).float(),\n",
" requires_grad=False) # eigvals(Z)\n",
" self.zz_eigvals = torch.nn.Parameter(torch.from_numpy(np.array([1.0, -1.0, -1.0, 1.0])).float(),\n",
" requires_grad=False) # eigvals(ZZ)\n",
" self.xx_eigvals = torch.nn.Parameter(torch.from_numpy(np.array([1.0, -1.0, 1.0, -1.0])).float(),\n",
" requires_grad=False) # eigvals(XX)\n",
"\n",
" self.local_fields_wires = [[i] for i in range(self.n_qubits)]\n",
" self.couplings_wires = [[i, j] for i, j in np.vstack(np.triu_indices(self.n_qubits, k=1)).T]\n",
Expand All @@ -153,7 +159,6 @@
"\n",
" local_fields = self.local_fields_head(embeddings)\n",
" zz_couplings = self.zz_body_couplings_head(embeddings)\n",
" xx_couplings = self.xx_body_couplings_head(embeddings)\n",
" self.q_device.execute_generator(self.circuit_gen(), reset=True)\n",
"\n",
" local_fields_probs = (\n",
Expand All @@ -170,11 +175,8 @@
" weighted_zz_eigvals = torch.einsum(\n",
" \"bi,kij,j->bk\", zz_couplings, couplings_probs, self.zz_eigvals\n",
" )\n",
" weighted_xx_eigvals = torch.einsum(\n",
" \"bi,kij,j->bk\", xx_couplings, couplings_probs, self.xx_eigvals\n",
" )\n",
"\n",
" expval = weighted_local_eigvals + weighted_zz_eigvals + weighted_xx_eigvals\n",
" expval = weighted_local_eigvals + weighted_zz_eigvals\n",
" return expval\n",
"\n",
" def circuit_gen(self):\n",
Expand Down Expand Up @@ -253,12 +255,11 @@
"model_cls = NIFCNN\n",
"\n",
"# Pipeline\n",
"job_output_folder_root = Path(os.getcwd()) / \"data\" / \"automl\"\n",
"job_output_folder = Path(dataset_name) / model_cls.MODEL_NAME\n",
"job_output_folder = Path(os.getcwd()) / \"data\" / \"automl\" / dataset_name / model_cls.MODEL_NAME\n",
"checkpoint_folder = Path(job_output_folder) / \"checkpoints\"\n",
"pipeline_args = dict(\n",
" max_epochs=100,\n",
" max_time=\"00:00:03:00\", # DD:HH:MM:SS\n",
" max_epochs=100, # increase at least to 256\n",
" max_time=\"00:00:02:00\", # DD:HH:MM:SS, increase at least to \"00:01:00:00\"\n",
")"
],
"id": "d8db16a0825411",
Expand Down Expand Up @@ -292,9 +293,9 @@
" model_cls=model_cls,\n",
" datamodule=datamodule,\n",
" checkpoint_folder=checkpoint_folder,\n",
" automl_iterations=5,\n",
" inner_max_epochs=10,\n",
" inner_max_time=\"00:00:01:00\",\n",
" automl_iterations=5, # increase at least to 32\n",
" inner_max_epochs=10, # increase at least to 128\n",
" inner_max_time=\"00:00:01:00\", # increase at least to \"00:00:10:00\"\n",
" automl_overwrite_fit=True,\n",
" **pipeline_args\n",
")"
Expand All @@ -310,19 +311,36 @@
"id": "af11b75e95e08c0c"
},
{
"metadata": {},
"metadata": {
"jupyter": {
"is_executing": true
}
},
"cell_type": "code",
"source": [
"start_time = time.perf_counter()\n",
"automl_pipeline.run()\n",
"end_time = time.perf_counter()\n",
"print(f\"Time taken: {end_time - start_time:.4f} seconds\")\n",
"print(f\"Best Hyperparameters:\\n{json.dumps(automl_pipeline.get_best_params(), indent=2, default=str)}\")"
"print(f\"Time taken: {end_time - start_time:.4f} seconds\")"
],
"id": "cf7202cce5b169d9",
"outputs": [],
"execution_count": null
},
{
"metadata": {},
"cell_type": "markdown",
"source": "We can then check the best hyperparameters found.",
"id": "cb990e91a45ed0b7"
},
{
"metadata": {},
"cell_type": "code",
"source": "print(f\"Best Hyperparameters:\\n{json.dumps(automl_pipeline.get_best_params(), indent=2, default=str)}\")",
"id": "dd640fc80cf084de",
"outputs": [],
"execution_count": null
},
{
"metadata": {},
"cell_type": "markdown",
Expand Down
Loading