Compare commits


43 Commits
0.2.0 ... 0.4.0

SHA1  Message  (Date)  [CI check results; checks are the gitea-physics/deepdog/pipeline head, pr-master, and tag builds]

f168666045  chore(release): 0.4.0  (2022-04-10 10:21:47 -05:00)  [all checks passed]
604916a829  Merge pull request 'pairs' (#5) from pairs into master, Reviewed-on: #5  (2022-04-10 15:20:05 +00:00)  [all checks passed]
941313a14c  style: whitespace fixes  (2022-04-10 10:05:15 -05:00)  [all checks passed]
cb64c0b7b6  Merge branch 'master' into pairs  (2022-04-10 10:03:47 -05:00)
ec7b4cac39  feat: Adds dynamic cycle count increases to help reach minimum success count  (2022-03-28 15:46:40 -05:00)  [pipeline/pr-master failed]
31e6cfaf51  lint: lint fixes  (2022-03-28 12:28:24 -05:00)  [pipeline/head passed, pipeline/pr-master failed]
c1c711f47b  fix: uses bigfix from pdme for negatives  (2022-03-28 10:52:28 -05:00)  [pipeline/head failed]
6463b135ef  feat!: Adds pair calculations, with changing api format  (2022-03-27 19:01:14 -05:00)  [pipeline/head errored]
a283cbd670  Merge pull request 'chore(deps): update dependency mypy to ^0.942' (#3) from renovate/mypy-0.x into master, Reviewed-on: #3  (2022-03-25 13:15:07 +00:00)  [all checks passed]
0b45172ca0  chore(deps): update dependency mypy to ^0.942  (2022-03-25 01:30:47 +00:00)  [all checks passed]
b6383d0a47  Merge pull request 'chore(deps): update dependency mypy to ^0.941' (#2) from renovate/mypy-0.x into master, Reviewed-on: #2  (2022-03-19 00:54:39 +00:00)  [all checks passed]
450d8e0ec9  chore(deps): update dependency mypy to ^0.941  (2022-03-15 01:30:59 +00:00)  [all checks passed]
f81904a898  chore(release): 0.3.5  (2022-03-06 18:42:26 -06:00)  [all checks passed]
88d961313c  feat: makes chunksize configurable  (2022-03-06 18:42:05 -06:00)  [all checks passed]
fa82caa752  chore(release): 0.3.4  (2022-03-06 17:31:47 -06:00)  [all checks passed]
0784cd53d7  feat: Changes chunksize for multiprocessing  (2022-03-06 17:31:17 -06:00)  [all checks passed]
fb4b012491  chore(release): 0.3.3  (2022-03-06 17:23:15 -06:00)  [all checks passed]
8617e4d274  fix: Fixes count to use cycles as well  (2022-03-06 17:22:52 -06:00)  [all checks passed]
fe2af1644e  chore(release): 0.3.2  (2022-03-06 17:18:42 -06:00)  [all checks passed]
e6d8d33c27  feat: Adds monte carlo cycles to trade off space and cpu  (2022-03-06 17:18:24 -06:00)
e00dc95f02  docs: readme badges  (2022-03-06 16:48:49 -06:00)  [all checks passed]
527be26fb2  chore(release): 0.3.1  (2022-03-06 16:45:45 -06:00)  [all checks passed]
456c81bca5  Merge branch 'master' of ssh://gitea.deepak.science:2222/physics/deepdog  (2022-03-06 16:45:16 -06:00)
7284dbeb34  feat: Adds alt bayes solver with monte carlo sampler  (2022-03-06 16:45:09 -06:00)
d078004773  feat: Updates to pdme version for faster bayes resolution  (2022-03-06 15:53:49 -06:00)
0441cde421  chore: adds release to do.sh  (2022-03-06 15:51:32 -06:00)
a9e91779bc  chore: adds standard-version release stuff  (2022-03-06 15:50:39 -06:00)
751bc66704  Merge pull request 'Configure Renovate' (#1) from renovate/configure into master, Reviewed-on: #1  (2022-02-23 19:44:38 +00:00)  [all checks passed]
413ff16acc  chore(deps): add renovate.json  (2022-02-23 19:42:04 +00:00)
5118173f09  chore: Adds semantic release to dev dependencies  (2022-02-14 10:00:59 -06:00)  [all checks passed]
6dfc26104a  0.3.0 (author: semantic-release; automatically generated by python-semantic-release)  (2022-02-14 09:57:42 -06:00)  [pipeline/head passed, pipeline/tag errored]
3a6be738b1  feat: Actually uses probabilities to update bayes  (2022-02-14 09:51:02 -06:00)  [all checks passed]
bd240900b4  fix: Actually logs end threshold  (2022-02-14 09:50:08 -06:00)
0e1fbec043  fix: Fixes bug with end_threshold and better error logging  (2022-02-14 09:44:04 -06:00)
3d3b1a83f6  feat: Adds end threshold for early abort  (2022-02-14 09:27:40 -06:00)
63cecba824  Created version 0.2.4  (2022-02-06 19:50:07 -06:00)  [all checks passed]
344998835d  fix: Fixes linting  (2022-02-06 19:49:02 -06:00)
838aeb0cf3  Created version 0.2.3  (2022-02-06 19:46:10 -06:00)  [pipeline/head failed, pipeline/tag failed]
e715d329fd  fix: diagnostic adds frequencies  (2022-02-06 19:46:05 -06:00)
521b49f14c  Created version 0.2.2  (2022-02-06 19:37:15 -06:00)  [all checks passed]
6d65e8dec5  fix: ignores extra fields in dictwriter  (2022-02-06 19:36:59 -06:00)
36354c2f2c  Adds patch  (2022-02-06 19:29:17 -06:00)  [all checks passed]
3534593557  fix: Take first element of solution list  (2022-02-06 19:28:54 -06:00)
14 changed files with 1133 additions and 141 deletions

.versionrc (new file, +10 lines)

@@ -0,0 +1,10 @@
{
"bumpFiles": [
{
"filename": "pyproject.toml",
"updater": "scripts/standard-version/pyproject-updater.js"
}
],
"sign": true,
"tag-prefix": ""
}

CHANGELOG.md (new file, +56 lines)

@@ -0,0 +1,56 @@
# Changelog
All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines.
## [0.4.0](https://gitea.deepak.science:2222/physics/deepdog/compare/0.3.5...0.4.0) (2022-04-10)
### ⚠ BREAKING CHANGES
* Adds pair calculations, with changing api format
### Features
* Adds dynamic cycle count increases to help reach minimum success count ([ec7b4ca](https://gitea.deepak.science:2222/physics/deepdog/commit/ec7b4cac393c15e94c513215c4f1ba32be2ae87a))
* Adds pair calculations, with changing api format ([6463b13](https://gitea.deepak.science:2222/physics/deepdog/commit/6463b135ef2d212b565864b5ac1b655e014d2194))
### Bug Fixes
* uses bigfix from pdme for negatives ([c1c711f](https://gitea.deepak.science:2222/physics/deepdog/commit/c1c711f47b574d3a9b8a24dbcbdd7f50b9be8ea9))
### [0.3.5](https://gitea.deepak.science:2222/physics/deepdog/compare/0.3.4...0.3.5) (2022-03-07)
### Features
* makes chunksize configurable ([88d9613](https://gitea.deepak.science:2222/physics/deepdog/commit/88d961313c1db0d49fd96939aa725a8706fa0412))
### [0.3.4](https://gitea.deepak.science:2222/physics/deepdog/compare/0.3.3...0.3.4) (2022-03-06)
### Features
* Changes chunksize for multiprocessing ([0784cd5](https://gitea.deepak.science:2222/physics/deepdog/commit/0784cd53d79e00684506604f094b5d820b3994d4))
### [0.3.3](https://gitea.deepak.science:2222/physics/deepdog/compare/0.3.2...0.3.3) (2022-03-06)
### Bug Fixes
* Fixes count to use cycles as well ([8617e4d](https://gitea.deepak.science:2222/physics/deepdog/commit/8617e4d2742b112cc824068150682ce3b2cdd879))
### [0.3.2](https://gitea.deepak.science:2222/physics/deepdog/compare/0.3.1...0.3.2) (2022-03-06)
### Features
* Adds monte carlo cycles to trade off space and cpu ([e6d8d33](https://gitea.deepak.science:2222/physics/deepdog/commit/e6d8d33c27e7922581e91c10de4f5faff2a51f8b))
### [0.3.1](https://gitea.deepak.science:2222/physics/deepdog/compare/v0.3.0...v0.3.1) (2022-03-06)
### Features
* Adds alt bayes solver with monte carlo sampler ([7284dbe](https://gitea.deepak.science:2222/physics/deepdog/commit/7284dbeb34ef46189d81fb719252dfa74b8e9fa8))
* Updates to pdme version for faster bayes resolution ([d078004](https://gitea.deepak.science:2222/physics/deepdog/commit/d078004773d9d9dccd0a9a52ca96aa57690f9b7e))

README.md

@@ -1,3 +1,18 @@
# deepdog
The dipole diagnostic tool.
[![Conventional Commits](https://img.shields.io/badge/Conventional%20Commits-1.0.0-green.svg?style=flat-square)](https://conventionalcommits.org)
[![PyPI](https://img.shields.io/pypi/v/deepdog?style=flat-square)](https://pypi.org/project/deepdog/)
[![Jenkins](https://img.shields.io/jenkins/build?jobUrl=https%3A%2F%2Fjenkins.deepak.science%2Fjob%2Fgitea-physics%2Fjob%2Fdeepdog%2Fjob%2Fmaster&style=flat-square)](https://jenkins.deepak.science/job/gitea-physics/job/deepdog/job/master/)
![Jenkins tests](https://img.shields.io/jenkins/tests?compact_message&jobUrl=https%3A%2F%2Fjenkins.deepak.science%2Fjob%2Fgitea-physics%2Fjob%2Fdeepdog%2Fjob%2Fmaster%2F&style=flat-square)
![Jenkins Coverage](https://img.shields.io/jenkins/coverage/cobertura?jobUrl=https%3A%2F%2Fjenkins.deepak.science%2Fjob%2Fgitea-physics%2Fjob%2Fdeepdog%2Fjob%2Fmaster%2F&style=flat-square)
![Maintenance](https://img.shields.io/maintenance/yes/2022?style=flat-square)
The DiPole DiaGnostic tool.
## Getting started
`poetry install` to start locally
Commit using [Conventional Commits](https://www.conventionalcommits.org/en/v1.0.0/), and when commits are on master, release with `doo release`.

deepdog/__init__.py

@@ -1,6 +1,7 @@
import logging
from deepdog.meta import __version__
from deepdog.bayes_run import BayesRun
from deepdog.alt_bayes_run import AltBayesRun
from deepdog.diagnostic import Diagnostic
@@ -8,7 +9,7 @@ def get_version():
    return __version__
__all__ = ["get_version", "BayesRun", "Diagnostic"]
__all__ = ["get_version", "BayesRun", "AltBayesRun", "Diagnostic"]
logging.getLogger(__name__).addHandler(logging.NullHandler())

deepdog/alt_bayes_run.py (new file, +189 lines)

@@ -0,0 +1,189 @@
import pdme.inputs
import pdme.model
import pdme.measurement.input_types
import pdme.measurement.oscillating_dipole
import pdme.util.fast_v_calc
import pdme.util.fast_nonlocal_spectrum
from typing import Sequence, Tuple, List
import datetime
import csv
import multiprocessing
import logging
import numpy

# TODO: remove hardcode
CHUNKSIZE = 50

# TODO: It's garbage to have this here duplicated from pdme.
DotInput = Tuple[numpy.typing.ArrayLike, float]

_logger = logging.getLogger(__name__)


def get_a_result(input) -> int:
    discretisation, dot_inputs, lows, highs, monte_carlo_count, max_frequency = input
    sample_dipoles = discretisation.get_model().get_n_single_dipoles(monte_carlo_count, max_frequency)
    vals = pdme.util.fast_v_calc.fast_vs_for_dipoles(dot_inputs, sample_dipoles)
    return numpy.count_nonzero(pdme.util.fast_v_calc.between(vals, lows, highs))


def get_a_result_using_pairs(input) -> int:
    discretisation, dot_inputs, pair_inputs, local_lows, local_highs, nonlocal_lows, nonlocal_highs, monte_carlo_count, max_frequency = input
    sample_dipoles = discretisation.get_model().get_n_single_dipoles(monte_carlo_count, max_frequency)
    local_vals = pdme.util.fast_v_calc.fast_vs_for_dipoles(dot_inputs, sample_dipoles)
    local_matches = pdme.util.fast_v_calc.between(local_vals, local_lows, local_highs)
    nonlocal_vals = pdme.util.fast_nonlocal_spectrum.fast_s_nonlocal(pair_inputs, sample_dipoles)
    nonlocal_matches = pdme.util.fast_v_calc.between(nonlocal_vals, nonlocal_lows, nonlocal_highs)
    combined_matches = numpy.logical_and(local_matches, nonlocal_matches)
    return numpy.count_nonzero(combined_matches)


class AltBayesRun():
    '''
    A single Bayes run for a given set of dots.

    Parameters
    ----------
    dot_inputs : Sequence[DotInput]
        The dot inputs for this bayes run.
    discretisations_with_names : Sequence[Tuple(str, pdme.model.Model)]
        The models to evaluate.
    actual_model_discretisation : pdme.model.Discretisation
        The discretisation for the model which is actually correct.
    filename_slug : str
        The filename slug to include.
    run_count: int
        The number of runs to do.
    '''
    def __init__(self, dot_positions: Sequence[numpy.typing.ArrayLike], frequency_range: Sequence[float], discretisations_with_names: Sequence[Tuple[str, pdme.model.Discretisation]], actual_model: pdme.model.Model, filename_slug: str, run_count: int = 100, low_error: float = 0.9, high_error: float = 1.1, pairs_high_error=None, pairs_low_error=None, monte_carlo_count: int = 10000, monte_carlo_cycles: int = 10, target_success: int = 100, max_monte_carlo_cycles_steps: int = 10, max_frequency: float = 20, end_threshold: float = None, chunksize: int = CHUNKSIZE, use_pairs: bool = False) -> None:
        self.dot_inputs = pdme.inputs.inputs_with_frequency_range(dot_positions, frequency_range)
        self.dot_inputs_array = pdme.measurement.input_types.dot_inputs_to_array(self.dot_inputs)
        self.use_pairs = use_pairs
        self.dot_pair_inputs = pdme.inputs.input_pairs_with_frequency_range(dot_positions, frequency_range)
        self.dot_pair_inputs_array = pdme.measurement.input_types.dot_pair_inputs_to_array(self.dot_pair_inputs)
        self.discretisations = [disc for (_, disc) in discretisations_with_names]
        self.model_names = [name for (name, _) in discretisations_with_names]
        self.actual_model = actual_model
        self.model_count = len(self.discretisations)
        self.monte_carlo_count = monte_carlo_count
        self.monte_carlo_cycles = monte_carlo_cycles
        self.target_success = target_success
        self.max_monte_carlo_cycles_steps = max_monte_carlo_cycles_steps
        self.run_count = run_count
        self.low_error = low_error
        self.high_error = high_error
        if pairs_low_error is None:
            self.pairs_low_error = self.low_error
        else:
            self.pairs_low_error = pairs_low_error
        if pairs_high_error is None:
            self.pairs_high_error = self.high_error
        else:
            self.pairs_high_error = pairs_high_error
        self.csv_fields = ["dipole_moment", "dipole_location", "dipole_frequency"]
        self.compensate_zeros = True
        self.chunksize = chunksize
        for name in self.model_names:
            self.csv_fields.extend([f"{name}_success", f"{name}_count", f"{name}_prob"])
        self.probabilities = [1 / self.model_count] * self.model_count
        timestamp = datetime.datetime.now().strftime("%Y%m%d-%H%M%S")
        if self.use_pairs:
            self.filename = f"{timestamp}-{filename_slug}.altbayes.pairs.csv"
        else:
            self.filename = f"{timestamp}-{filename_slug}.altbayes.csv"
        self.max_frequency = max_frequency
        if end_threshold is not None:
            if 0 < end_threshold < 1:
                self.end_threshold: float = end_threshold
                self.use_end_threshold = True
                _logger.info(f"Will abort early, at {self.end_threshold}.")
            else:
                raise ValueError(f"end_threshold should be between 0 and 1, but is actually {end_threshold}")

    def go(self) -> None:
        with open(self.filename, "a", newline="") as outfile:
            writer = csv.DictWriter(outfile, fieldnames=self.csv_fields, dialect="unix")
            writer.writeheader()

        for run in range(1, self.run_count + 1):
            rng = numpy.random.default_rng()
            frequency = rng.uniform(1, self.max_frequency)

            # Generate the actual dipoles
            actual_dipoles = self.actual_model.get_dipoles(frequency)
            dots = actual_dipoles.get_percent_range_dot_measurements(self.dot_inputs, self.low_error, self.high_error)
            lows, highs = pdme.measurement.input_types.dot_range_measurements_low_high_arrays(dots)
            pair_lows, pair_highs = (None, None)
            if self.use_pairs:
                pair_measurements = actual_dipoles.get_percent_range_dot_pair_measurements(self.dot_pair_inputs, self.pairs_low_error, self.pairs_high_error)
                pair_lows, pair_highs = pdme.measurement.input_types.dot_range_measurements_low_high_arrays(pair_measurements)
            _logger.info(f"Going to work on dipole at {actual_dipoles.dipoles}")

            results = []
            _logger.debug("Going to iterate over discretisations now")
            for disc_count, discretisation in enumerate(self.discretisations):
                _logger.debug(f"Doing discretisation #{disc_count}")
                with multiprocessing.Pool(multiprocessing.cpu_count() - 1 or 1) as pool:
                    cycle_count = 0
                    cycle_success = 0
                    cycles = 0
                    while (cycles < self.max_monte_carlo_cycles_steps) and (cycle_success <= self.target_success):
                        _logger.debug(f"Starting cycle {cycles}")
                        cycles += 1
                        current_success = 0
                        cycle_count += self.monte_carlo_count * self.monte_carlo_cycles
                        if self.use_pairs:
                            current_success = sum(
                                pool.imap_unordered(get_a_result_using_pairs, [(discretisation, self.dot_inputs_array, self.dot_pair_inputs_array, lows, highs, pair_lows, pair_highs, self.monte_carlo_count, self.max_frequency)] * self.monte_carlo_cycles, self.chunksize)
                            )
                        else:
                            current_success = sum(
                                pool.imap_unordered(get_a_result, [(discretisation, self.dot_inputs_array, lows, highs, self.monte_carlo_count, self.max_frequency)] * self.monte_carlo_cycles, self.chunksize)
                            )
                        cycle_success += current_success
                    results.append((cycle_count, cycle_success))

            _logger.debug("Done, constructing output now")
            row = {
                "dipole_moment": actual_dipoles.dipoles[0].p,
                "dipole_location": actual_dipoles.dipoles[0].s,
                "dipole_frequency": actual_dipoles.dipoles[0].w
            }
            successes: List[float] = []
            counts: List[int] = []
            for model_index, (name, (count, result)) in enumerate(zip(self.model_names, results)):
                row[f"{name}_success"] = result
                row[f"{name}_count"] = count
                successes.append(max(result, 0.5))
                counts.append(count)

            success_weight = sum([(succ / count) * prob for succ, count, prob in zip(successes, counts, self.probabilities)])
            new_probabilities = [(succ / count) * old_prob / success_weight for succ, count, old_prob in zip(successes, counts, self.probabilities)]
            self.probabilities = new_probabilities
            for name, probability in zip(self.model_names, self.probabilities):
                row[f"{name}_prob"] = probability
            _logger.info(row)
            with open(self.filename, "a", newline="") as outfile:
                writer = csv.DictWriter(outfile, fieldnames=self.csv_fields, dialect="unix")
                writer.writerow(row)
            if self.use_end_threshold:
                max_prob = max(self.probabilities)
                if max_prob > self.end_threshold:
                    _logger.info(f"Aborting early, because {max_prob} is greater than {self.end_threshold}")
                    break
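The file above is the whole of the new pairs-aware solver. As a minimal calling sketch (not from this diff: the discretisations and the actual model are assumed to be pdme objects built elsewhere, and the positions, slug, and thresholds are placeholders), a run might be wired up like this:

```python
from typing import Sequence, Tuple

import pdme.model

from deepdog import AltBayesRun


def run_pairs_comparison(
    discretisations_with_names: Sequence[Tuple[str, pdme.model.Discretisation]],
    actual_model: pdme.model.Model,
) -> None:
    # Illustrative dot geometry and frequency range; real values come from the experiment being modelled.
    dot_positions = [[0, 0, 0.01], [0.01, 0, 0.01]]
    frequency_range = [1, 10]
    run = AltBayesRun(
        dot_positions,
        frequency_range,
        discretisations_with_names,
        actual_model,
        filename_slug="pairs-example",
        use_pairs=True,        # new in 0.4.0: also require dot-pair (nonlocal) matches
        target_success=100,    # cycles are added dynamically until this many successes (or the step cap)
        end_threshold=0.99,    # abort the run early once one model's probability exceeds 0.99
    )
    run.go()
```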

deepdog/bayes_run.py

@@ -41,7 +41,7 @@ class BayesRun():
    run_count: int
        The number of runs to do.
    '''
    def __init__(self, dot_inputs: Sequence[DotInput], discretisations_with_names: Sequence[Tuple[str, pdme.model.Discretisation]], actual_model: pdme.model.Model, filename_slug: str, run_count: int, max_frequency: float = None) -> None:
    def __init__(self, dot_inputs: Sequence[DotInput], discretisations_with_names: Sequence[Tuple[str, pdme.model.Discretisation]], actual_model: pdme.model.Model, filename_slug: str, run_count: int, max_frequency: float = None, end_threshold: float = None) -> None:
        self.dot_inputs = dot_inputs
        self.discretisations = [disc for (_, disc) in discretisations_with_names]
        self.model_names = [name for (name, _) in discretisations_with_names]
@@ -50,7 +50,6 @@ class BayesRun():
        self.run_count = run_count
        self.csv_fields = ["dipole_moment", "dipole_location", "dipole_frequency"]
        self.compensate_zeros = True
        for name in self.model_names:
            self.csv_fields.extend([f"{name}_success", f"{name}_count", f"{name}_prob"])
@@ -60,6 +59,14 @@ class BayesRun():
        self.filename = f"{timestamp}-{filename_slug}.csv"
        self.max_frequency = max_frequency
        if end_threshold is not None:
            if 0 < end_threshold < 1:
                self.end_threshold: float = end_threshold
                self.use_end_threshold = True
                _logger.info(f"Will abort early, at {self.end_threshold}.")
            else:
                raise ValueError(f"end_threshold should be between 0 and 1, but is actually {end_threshold}")

    def go(self) -> None:
        with open(self.filename, "a", newline="") as outfile:
            writer = csv.DictWriter(outfile, fieldnames=self.csv_fields, dialect="unix")
@@ -88,7 +95,8 @@ class BayesRun():
                "dipole_location": dipoles.dipoles[0].s,
                "dipole_frequency": dipoles.dipoles[0].w
            }
            successes: List[int] = []
            successes: List[float] = []
            counts: List[int] = []
            for model_index, (name, result) in enumerate(zip(self.model_names, results)):
                count = 0
                success = 0
@@ -99,10 +107,11 @@ class BayesRun():
                row[f"{name}_success"] = success
                row[f"{name}_count"] = count
                successes.append(max(success, 1))
                successes.append(max(success, 0.5))
                counts.append(count)
            success_weight = sum([succ * prob for succ, prob in zip(successes, self.probabilities)])
            new_probabilities = [succ * old_prob / success_weight for succ, old_prob in zip(successes, self.probabilities)]
            success_weight = sum([(succ / count) * prob for succ, count, prob in zip(successes, counts, self.probabilities)])
            new_probabilities = [(succ / count) * old_prob / success_weight for succ, count, old_prob in zip(successes, counts, self.probabilities)]
            self.probabilities = new_probabilities
            for name, probability in zip(self.model_names, self.probabilities):
                row[f"{name}_prob"] = probability
@@ -111,3 +120,9 @@ class BayesRun():
            with open(self.filename, "a", newline="") as outfile:
                writer = csv.DictWriter(outfile, fieldnames=self.csv_fields, dialect="unix")
                writer.writerow(row)
            if self.use_end_threshold:
                max_prob = max(self.probabilities)
                if max_prob > self.end_threshold:
                    _logger.info(f"Aborting early, because {max_prob} is greater than {self.end_threshold}")
                    break
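To make the reworked update above concrete, here is a small self-contained sketch of the success-rate weighting that both BayesRun and AltBayesRun now apply each run (the counts and match numbers are invented; the 0.5 floor mirrors the max(success, 0.5) guard against zeroing out a model):

```python
# Two models, equal priors; each was sampled 200000 times this run.
successes = [412.0, 0.5]   # matches per model (0.5 is the floor for a model with no matches)
counts = [200000, 200000]
priors = [0.5, 0.5]

# Posterior for each model is proportional to (success rate) * (prior), then normalised.
success_weight = sum((succ / count) * prob for succ, count, prob in zip(successes, counts, priors))
posteriors = [(succ / count) * prior / success_weight for succ, count, prior in zip(successes, counts, priors)]

print(posteriors)  # roughly [0.9988, 0.0012]: the first model now dominates
```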

deepdog/diagnostic.py

@@ -39,6 +39,8 @@ class SingleDipoleDiagnostic():
        self.s_result_x = self.result_dipole.s[0]
        self.s_result_y = self.result_dipole.s[1]
        self.s_result_z = self.result_dipole.s[2]
        self.w_actual = self.actual_dipole.w
        self.w_result = self.result_dipole.w


class Diagnostic():
@@ -65,7 +67,7 @@ class Diagnostic():
        self.discretisations_with_names = discretisations_with_names
        self.model_count = len(self.discretisations_with_names)
        self.csv_fields = ["model", "index", "bounds", "p_actual_x", "p_actual_y", "p_actual_z", "s_actual_x", "s_actual_y", "s_actual_z", "actual_dipole_freq", "success", "p_result_x", "p_result_y", "p_result_z", "s_result_x", "s_result_y", "s_result_z"]
        self.csv_fields = ["model", "index", "bounds", "p_actual_x", "p_actual_y", "p_actual_z", "s_actual_x", "s_actual_y", "s_actual_z", "w_actual", "success", "p_result_x", "p_result_y", "p_result_z", "s_result_x", "s_result_y", "s_result_z", "w_result"]

        timestamp = datetime.datetime.now().strftime("%Y%m%d-%H%M%S")
        self.filename = f"{timestamp}-{filename_slug}.diag.csv"
@@ -84,14 +86,14 @@ class Diagnostic():
            results = pool.starmap(get_a_result, zip(itertools.repeat(discretisation), itertools.repeat(self.dots), discretisation.all_indices()))

        with open(self.filename, "a", newline='') as outfile:
            writer = csv.DictWriter(outfile, fieldnames=self.csv_fields, dialect='unix')
            writer = csv.DictWriter(outfile, fieldnames=self.csv_fields, dialect='unix', extrasaction="ignore")
            for idx, result in results:
                bounds = discretisation.bounds(idx)
                actual_success = result.success and result.cost <= 1e-10
                diag_row = SingleDipoleDiagnostic(name, idx, bounds, self.dipoles.dipoles[0], discretisation.model.solution_as_dipoles(result.normalised_x), actual_success)
                diag_row = SingleDipoleDiagnostic(name, idx, bounds, self.dipoles.dipoles[0], discretisation.model.solution_as_dipoles(result.normalised_x)[0], actual_success)
                row = vars(diag_row)
                _logger.debug(f"Writing result {row}")
                writer.writerow(row)
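For reference, the extrasaction="ignore" argument added above is standard-library csv.DictWriter behaviour; it is what lets vars(diag_row) carry more attributes than csv_fields without raising. A tiny sketch with invented field names:

```python
import csv
import sys

# With the default extrasaction="raise", DictWriter raises ValueError for keys not in fieldnames;
# "ignore" silently drops them instead, which is what the diagnostic wants for vars(diag_row).
fields = ["model", "w_actual", "w_result"]
writer = csv.DictWriter(sys.stdout, fieldnames=fields, dialect="unix", extrasaction="ignore")
writer.writeheader()
writer.writerow({"model": "geom-a", "w_actual": 3.2, "w_result": 3.1, "extra_debug_field": 42})
```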

do.sh (+4 lines)

@@ -16,6 +16,10 @@ test() {
poetry run pytest
}
release() {
./scripts/release.sh
}
htmlcov() {
poetry run pytest --cov-report=html
}

poetry.lock (generated, 861 changed lines; diff suppressed because it is too large)

pyproject.toml

@@ -1,18 +1,19 @@
[tool.poetry]
name = "deepdog"
version = "0.2.0"
version = "0.4.0"
description = ""
authors = ["Deepak Mallubhotla <dmallubhotla+github@gmail.com>"]
[tool.poetry.dependencies]
python = "^3.8,<3.10"
pdme = "^0.5.0"
pdme = "^0.6.1"
[tool.poetry.dev-dependencies]
pytest = ">=6"
flake8 = "^4.0.1"
pytest-cov = "^3.0.0"
mypy = "^0.931"
mypy = "^0.942"
python-semantic-release = "^7.24.0"
[build-system]
requires = ["poetry-core>=1.0.0"]
@@ -32,3 +33,7 @@ module = [
"scipy.optimize"
]
ignore_missing_imports = true
[tool.semantic_release]
version_toml = "pyproject.toml:tool.poetry.version"
tag_format = "{version}"

renovate.json (new file, +3 lines)

@@ -0,0 +1,3 @@
{
"$schema": "https://docs.renovatebot.com/renovate-schema.json"
}

patch.sh (deleted file)

@@ -1,29 +0,0 @@
#!/usr/bin/env bash
set -Eeuo pipefail
if [ -z "$(git status --porcelain)" ]; then
# Working directory clean
branch_name=$(git symbolic-ref -q HEAD)
branch_name=${branch_name##refs/heads/}
branch_name=${branch_name:-HEAD}
poetry version patch
version=`sed 's/version = "\([0-9]*.[0-9]*.[0-9]*\)"/\1/p' -n <pyproject.toml`
read -p "Create commit for version $version? " -n 1 -r
echo # (optional) move to a new line
if [[ $REPLY =~ ^[Yy]$ ]]
then
# do dangerous stuff
echo "Creating a new patch"
git add pyproject.toml
git commit -m "Created version $version"
git tag -a "$version" -m "patch.sh created version $version"
git push --tags
else
echo "Surrendering, clean up by reverting pyproject.toml..."
exit 2
fi
else
echo "Can't create patch version, working tree unclean..."
exit 1
fi

scripts/release.sh (new file, +45 lines)

@@ -0,0 +1,45 @@
#!/usr/bin/env bash
set -Eeuo pipefail
if [ -z "$(git status --porcelain)" ]; then
branch_name=$(git symbolic-ref -q HEAD)
branch_name=${branch_name##refs/heads/}
branch_name=${branch_name:-HEAD}
if [ $branch_name != "master" ]; then
echo "The current branch is not master!"
echo "I'd feel uncomfortable releasing from here..."
exit 3
fi
release_needed=false
if \
{ git log "$( git describe --tags --abbrev=0 )..HEAD" --format='%s' | cut -d: -f1 | sort -u | sed -e 's/([^)]*)//' | grep -q -i -E '^feat|fix|perf|refactor|revert$' ; } || \
{ git log "$( git describe --tags --abbrev=0 )..HEAD" --format='%s' | cut -d: -f1 | sort -u | sed -e 's/([^)]*)//' | grep -q -E '\!$' ; } || \
{ git log "$( git describe --tags --abbrev=0 )..HEAD" --format='%b' | grep -q -E '^BREAKING CHANGE:' ; }
then
release_needed=true
fi
if ! [ "$release_needed" = true ]; then
echo "No release needed..."
exit 0
fi
# Working directory clean
echo "Doing a dry run..."
npx standard-version --dry-run
read -p "Does that look good? [y/N] " -n 1 -r
echo # (optional) move to a new line
if [[ $REPLY =~ ^[Yy]$ ]]
then
# do dangerous stuff
npx standard-version
git push --follow-tags origin master
else
echo "okay, never mind then..."
exit 2
fi
else
echo "Can't create release, working tree unclean..."
exit 1
fi

scripts/standard-version/pyproject-updater.js (new file, +11 lines)

@@ -0,0 +1,11 @@
const pattern = /(\[tool\.poetry\]\nname = "deepdog"\nversion = ")(?<vers>\d+\.\d+\.\d)(")/mg;
module.exports.readVersion = function (contents) {
const result = pattern.exec(contents);
return result.groups.vers;
}
module.exports.writeVersion = function (contents, version) {
const newContents = contents.replace(pattern, `$1${version}$3`);
return newContents;
}