Skip to content

Commit

Permalink
Merge pull request #33 from aai-institute/bugfixes
Browse files Browse the repository at this point in the history
Bugfixe experiment Library
  • Loading branch information
fariedabuzaid authored Oct 25, 2023
2 parents e302729 + 5cd9b62 commit da28ec5
Show file tree
Hide file tree
Showing 9 changed files with 327 additions and 93 deletions.
20 changes: 20 additions & 0 deletions docs/Makefile
Original file line number Diff line number Diff line change
@@ -0,0 +1,20 @@
# Minimal makefile for Sphinx documentation
#

# You can set these variables from the command line, and also
# from the environment for the first two.
SPHINXOPTS ?=
SPHINXBUILD ?= sphinx-build
SOURCEDIR = .
BUILDDIR = _build

# Put it first so that "make" without argument is like "make help".
# NOTE: recipe lines below MUST start with a hard TAB (restored here —
# this copy had lost the tab indentation, which makes GNU Make fail
# with "*** missing separator").
help:
	@$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)

.PHONY: help Makefile

# Catch-all target: route all unknown targets to Sphinx using the new
# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
# "Makefile" is listed as a prerequisite so the catch-all does not try
# to (re)build this makefile itself via the pattern rule.
%: Makefile
	@$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
57 changes: 57 additions & 0 deletions docs/conf.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,57 @@
# Sphinx configuration for building the VeriFlow documentation.
#
# Only the most common options are set here; the full list is documented at
# https://www.sphinx-doc.org/en/master/usage/configuration.html

# -- Path setup --------------------------------------------------------------
#
# If extensions (or modules autodoc should import) live outside this
# directory, insert their absolute location into sys.path here, e.g.:
#
# import os
# import sys
# sys.path.insert(0, os.path.abspath('.'))


# -- Project information -----------------------------------------------------

project = "VeriFlow"
copyright = "2023, Transferlab, appliedAI Institute for Europe"  # Sphinx convention; shadows the builtin on purpose
author = "Faried Abu Zaid"

# Full version string, including any alpha/beta/rc suffix.
release = "1.0"


# -- General configuration ---------------------------------------------------

# Enabled Sphinx extensions: API docs from docstrings (autodoc) and
# Google/NumPy-style docstring support (napoleon).
extensions = [
    "sphinx.ext.autodoc",
    "sphinx.ext.napoleon",
]

# Directories containing page templates, relative to this file.
templates_path = ["_templates"]

# Patterns (relative to the source dir) ignored when scanning for source
# files; this also affects html_static_path and html_extra_path.
exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"]


# -- Options for HTML output -------------------------------------------------

# Built-in theme used for HTML and HTML Help pages.
html_theme = "alabaster"

# Directories with custom static files (e.g. style sheets). They are copied
# after the builtin static files, so a "default.css" here overrides the
# builtin "default.css".
html_static_path = ["_static"]
20 changes: 20 additions & 0 deletions docs/index.rst
Original file line number Diff line number Diff line change
@@ -0,0 +1,20 @@
.. VeriFlow documentation master file, created by
   sphinx-quickstart on Sat Sep 30 23:35:52 2023.
   You can adapt this file completely to your liking, but it should at least
   contain the root `toctree` directive.

Welcome to VeriFlow's documentation!
====================================

.. toctree::
   :maxdepth: 2
   :caption: Contents:



Indices and tables
==================

* :ref:`genindex`
* :ref:`modindex`
* :ref:`search`
35 changes: 35 additions & 0 deletions docs/make.bat
Original file line number Diff line number Diff line change
@@ -0,0 +1,35 @@
@ECHO OFF

REM Run from the directory containing this script regardless of the
REM caller's working directory; the popd at :end restores it.
pushd %~dp0

REM Command file for Sphinx documentation

REM Let the caller point SPHINXBUILD at a specific executable; default
REM to whatever "sphinx-build" resolves to on PATH.
if "%SPHINXBUILD%" == "" (
set SPHINXBUILD=sphinx-build
)
set SOURCEDIR=.
set BUILDDIR=_build

REM Probe that sphinx-build exists: cmd.exe sets errorlevel 9009 when a
REM command is not found. Output is discarded; only the exit code matters.
%SPHINXBUILD% >NUL 2>NUL
if errorlevel 9009 (
echo.
echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
echo.installed, then set the SPHINXBUILD environment variable to point
echo.to the full path of the 'sphinx-build' executable. Alternatively you
echo.may add the Sphinx directory to PATH.
echo.
echo.If you don't have Sphinx installed, grab it from
echo.https://www.sphinx-doc.org/
exit /b 1
)

REM No build target given: show Sphinx's help instead.
if "%1" == "" goto help

REM Delegate the requested target to Sphinx's "make mode" (-M).
%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
goto end

:help
%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%

:end
popd
223 changes: 143 additions & 80 deletions experiments/mnist/config.yaml
Original file line number Diff line number Diff line change
@@ -1,83 +1,146 @@
# NOTE(review): this span appears to be a unified diff rendered WITHOUT the
# +/- markers: the section up to the first "- &digit_0" looks like the
# PRE-change config and the remainder the POST-change config. The page
# scrape has also stripped the YAML indentation, so this text is not valid
# YAML as-is — the original nesting must be restored before use. Comments
# below annotate apparent intent only; verify against the repository file.
---
__object__: src.explib.base.ExperimentCollection
# NOTE(review): two consecutive "name:" keys — almost certainly the old and
# new value from the diff; a YAML parser would keep only the last one.
name: mnist_basedist_comparison
name: mnist_digit_basedist_comparison
experiments:
# Base experiment: NICE flow, LU permutation, Laplace base distribution.
# Later entries derive from it via __overwrites__ (merge semantics are
# presumably defined in src.explib — confirm there).
- &exp_nice_lu_laplace
__object__: src.explib.hyperopt.HyperoptExperiment
name: mnist_nice_lu_laplace
scheduler: &scheduler
__object__: ray.tune.schedulers.ASHAScheduler
max_t: 1000000
grace_period: 1000000
reduction_factor: 2
num_hyperopt_samples: &num_hyperopt_samples 50
gpus_per_trial: &gpus_per_trial 0
cpus_per_trial: &cpus_per_trial 1
tuner_params: &tuner_params
metric: val_loss
mode: min
trial_config:
dataset: &dataset
__object__: src.explib.datasets.MnistSplit
digit: 0
epochs: &epochs 200000
patience: &patience 50
batch_size: &batch_size
# __eval__ values are presumably Python expressions evaluated by the
# experiment library with tune/torch in scope — TODO confirm.
__eval__: tune.choice([32])
optim_cfg: &optim
optimizer:
__class__: torch.optim.Adam
params:
lr:
__eval__: tune.loguniform(1e-4, 1e-2)
weight_decay: 0.0

model_cfg:
type:
__class__: &model src.veriflow.flows.NiceFlow
params:
coupling_layers: &coupling_layers
__eval__: tune.choice([2, 3, 4, 5, 6, 7, 8, 9, 10])
coupling_nn_layers: &coupling_nn_layers
__eval__: tune.choice([[w]*l for l in [1, 2, 3, 4] for w in [10, 20, 50, 100, 200]])
nonlinearity: &nonlinearity
__eval__: tune.choice([torch.nn.ReLU()])
split_dim:
__eval__: tune.choice([i for i in range(1, 51)])
base_distribution:
__object__: pyro.distributions.Laplace
loc:
__eval__: torch.zeros(100)
scale:
__eval__: torch.ones(100)
permutation: &permutation LU
# Variant: Normal base distribution (everything else inherited).
- &exp_nice_lu_normal
__overwrites__: *exp_nice_lu_laplace
name: mnist_nice_lu_normal
model_cfg:
params:
base_distribution:
__object__: pyro.distributions.Normal
loc:
__eval__: torch.zeros(100)
scale:
__eval__: torch.ones(100)
# Variant: random permutation, Laplace base.
- &exp_nice_rand_laplace
__overwrites__: *exp_nice_lu_laplace
name: mnist_nice_rand_laplace
model_cfg:
params:
permutation: random
# Variant: random permutation, Normal base.
- &exp_nice_rand_normal
__overwrites__: *exp_nice_lu_laplace
name: mnist_nice_rand_normal
model_cfg:
params:
permutation: random
base_distribution:
__object__: pyro.distributions.Normal
loc:
__eval__: torch.zeros(100)
scale:
__eval__: torch.ones(100)

# NOTE(review): from here on this looks like the POST-change config: the
# comparison is wrapped in a per-digit collection (&digit_0), overrides now
# nest under trial_config, __exact__ appears to replace (rather than merge)
# the base_distribution subtree, and num_hyperopt_samples drops from 50
# to 20 — confirm each against src.explib.
- &digit_0
__object__: src.explib.base.ExperimentCollection
name: mnist_basedist_comparison
experiments:
- &exp_nice_lu_laplace
__object__: src.explib.hyperopt.HyperoptExperiment
name: mnist_nice_lu_laplace
scheduler: &scheduler
__object__: ray.tune.schedulers.ASHAScheduler
max_t: 1000000
grace_period: 1000000
reduction_factor: 2
num_hyperopt_samples: &num_hyperopt_samples 20
gpus_per_trial: &gpus_per_trial 0
cpus_per_trial: &cpus_per_trial 1
tuner_params: &tuner_params
metric: val_loss
mode: min
trial_config:
dataset: &dataset
__object__: src.explib.datasets.MnistSplit
digit: 0
epochs: &epochs 200000
patience: &patience 50
batch_size: &batch_size
__eval__: tune.choice([32])
optim_cfg: &optim
optimizer:
__class__: torch.optim.Adam
params:
lr:
__eval__: tune.loguniform(1e-4, 1e-2)
weight_decay: 0.0

model_cfg:
type:
__class__: &model src.veriflow.flows.NiceFlow
params:
coupling_layers: &coupling_layers
__eval__: tune.choice([2, 3, 4, 5, 6, 7, 8, 9, 10])
coupling_nn_layers: &coupling_nn_layers
__eval__: tune.choice([[w]*l for l in [1, 2, 3, 4] for w in [10, 20, 50, 100, 200]])
nonlinearity: &nonlinearity
__eval__: tune.choice([torch.nn.ReLU()])
split_dim:
__eval__: tune.choice([i for i in range(1, 51)])
base_distribution:
__object__: pyro.distributions.Laplace
loc:
__eval__: torch.zeros(100)
scale:
__eval__: torch.ones(100)
permutation: &permutation LU
- &exp_nice_lu_normal
__overwrites__: *exp_nice_lu_laplace
name: mnist_nice_lu_normal
trial_config:
model_cfg:
params:
base_distribution:
# __exact__ presumably replaces the whole subtree instead of
# deep-merging it — TODO confirm in src.explib.
__exact__:
__object__: pyro.distributions.Normal
loc:
__eval__: torch.zeros(100)
scale:
__eval__: torch.ones(100)
- &exp_nice_rand_laplace
__overwrites__: *exp_nice_lu_laplace
name: mnist_nice_rand_laplace
trial_config:
model_cfg:
params:
permutation: random
- &exp_nice_rand_normal
__overwrites__: *exp_nice_lu_laplace
name: mnist_nice_rand_normal
trial_config:
model_cfg:
params:
permutation: random
base_distribution:
__exact__:
__object__: pyro.distributions.Normal
loc:
__eval__: torch.zeros(100)
scale:
__eval__: torch.ones(100)
# digit_1 .. digit_9: each reuses &digit_0, changing only the dataset's
# target digit.
- &digit_1
__overwrites__: *digit_0
experiments:
trial_config:
dataset:
digit: 1
- &digit_2
__overwrites__: *digit_0
experiments:
trial_config:
dataset:
digit: 2
- &digit_3
__overwrites__: *digit_0
experiments:
trial_config:
dataset:
digit: 3
- &digit_4
__overwrites__: *digit_0
experiments:
trial_config:
dataset:
digit: 4
- &digit_5
__overwrites__: *digit_0
experiments:
trial_config:
dataset:
digit: 5
- &digit_6
__overwrites__: *digit_0
experiments:
trial_config:
dataset:
digit: 6
- &digit_7
__overwrites__: *digit_0
experiments:
trial_config:
dataset:
digit: 7
- &digit_8
__overwrites__: *digit_0
experiments:
trial_config:
dataset:
digit: 8
- &digit_9
__overwrites__: *digit_0
experiments:
trial_config:
dataset:
digit: 9

Loading

0 comments on commit da28ec5

Please sign in to comment.