Commit b4ff3faa authored by Raza, Zamaan's avatar Raza, Zamaan
Browse files

Merge branch 'integrator_testing' into 'develop'

Integrator testing scripts

See merge request mlz/nsxtool!453
parents ed17ef21 83424598
Pipeline #66068 failed with stage
in 2 minutes and 18 seconds
......@@ -599,6 +599,11 @@ bool Experiment::addShapeModel(const std::string& name, const ShapeModel& shapes
return _shape_handler->addShapeModel(name, shapes);
}
//! Add an empty shape model under the given name.
//! Delegates to ShapeHandler::addEmptyModel; returns false if a shape model
//! with this name already exists, true on success.
bool Experiment::addEmptyShapeModel(const std::string& name)
{
    return _shape_handler->addEmptyModel(name);
}
bool Experiment::hasShapeModel(const std::string& name) const
{
return _shape_handler->hasShapeModel(name);
......
......@@ -170,19 +170,21 @@ class Experiment {
std::vector<PeakCollection*> getPeakCollections();
// ShapeHandler
//! Add a peak collection
//! Add a shape model
bool addShapeModel(const std::string& name, const ShapeModel& shapes);
//! Returns true if the experiment has named peak collection
//! Add an empty shape model
bool addEmptyShapeModel(const std::string& name);
//! Returns true if the experiment has named shape model
bool hasShapeModel(const std::string& name) const;
//! Returns the named peak collection
//! Returns the named shape model
ShapeModel* getShapeModel(const std::string name);
// !Remove a shape collection from the experiment
//! Remove a shape model from the experiment
void removeShapeModel(const std::string& name);
//! Get the number of shape collections
//! Get the number of shape models
int numShapeModels() const;
//! Generate name for new peak collection
//! Generate name for new shape model
std::string generateShapeModelName();
//! Get a vector of pointers to peak collections
//! Get a vector of pointers to shape models
std::vector<ShapeModel*> getShapeModels();
// Instrument state handler
......
......@@ -44,7 +44,7 @@ bool ShapeHandler::addShapeModel(const std::string& name, const nsx::ShapeModel&
return hasShapeModel(name); // now name must be in use
}
bool ShapeHandler::addEmptyCollection(const std::string& name)
bool ShapeHandler::addEmptyModel(const std::string& name)
{
if (hasShapeModel(name))
return false;
......
......@@ -33,25 +33,25 @@ class ShapeHandler {
ShapeHandler() = default;
~ShapeHandler();
//! Get a pointer to the map of shape collections
//! Get a pointer to the map of shape models
const ShapeModelMap* getShapeModelMap() const;
//! Add a shape collection
//! Add a shape model
bool addShapeModel(const std::string& name, const nsx::ShapeModel& shapes);
//! Add an empty shape collection
bool addEmptyCollection(const std::string& name);
//! Returns true if the experiment has named shape collection
//! Add an empty shape model
bool addEmptyModel(const std::string& name);
//! Returns true if the experiment has named shape model
bool hasShapeModel(const std::string& name) const;
//! Returns the named shape collection
//! Returns the named shape model
ShapeModel* getShapeModel(const std::string name);
// !Remove a shape collection from the experiment
//! Remove a shape model
void removeShapeModel(const std::string& name);
//! Get a vector of shape collection names from the handler
//! Get a vector of shape model names from the handler
std::vector<std::string> getCollectionNames() const;
//! Get the number of shape collections
//! Get the number of shape models
int numShapeModels() const { return _shape_models.size(); };
//! Generate name for new shape collection
//! Generate name for new shape model
std::string generateName();
//! Get a vector of pointers to shape collections
//! Get a vector of pointers to shape models
std::vector<ShapeModel*> getShapeModels();
private:
......
......@@ -18,6 +18,7 @@
#include "base/geometry/Ellipsoid.h"
#include "base/utils/Logger.h"
#include "core/data/DataSet.h"
#include "core/data/DataTypes.h"
#include "core/detector/Detector.h"
#include "core/detector/DetectorEvent.h"
#include "core/instrument/Diffractometer.h"
......@@ -390,7 +391,7 @@ std::optional<Eigen::Matrix3d> ShapeModel::meanCovariance(
return JI * cov * JI.transpose();
}
void ShapeModel::setPredictedShapes(PeakCollection* peaks, PeakInterpolation interpolation)
void ShapeModel::setPredictedShapes(PeakCollection* peaks)
{
nsxlog(
Level::Info, "ShapeModel: Computing shapes of ", peaks->numberOfPeaks(),
......@@ -405,6 +406,7 @@ void ShapeModel::setPredictedShapes(PeakCollection* peaks, PeakInterpolation int
_handler->setProgress(0);
}
int n_bad_shapes = 0;
#pragma omp parallel for
for (auto peak : peaks->getPeakList()) {
peak->setPredicted(true);
......@@ -414,9 +416,11 @@ void ShapeModel::setPredictedShapes(PeakCollection* peaks, PeakInterpolation int
// too few or no neighbouring peaks found)
if (auto cov = meanCovariance(
peak, _params->neighbour_range_pixels, _params->neighbour_range_frames,
_params->min_n_neighbors, interpolation)) {
_params->min_n_neighbors, _params->interpolation)) {
Eigen::Vector3d center = peak->shape().center();
peak->setShape(Ellipsoid(center, cov.value().inverse()));
} else {
++n_bad_shapes;
}
if (_handler) {
......@@ -424,7 +428,7 @@ void ShapeModel::setPredictedShapes(PeakCollection* peaks, PeakInterpolation int
_handler->setProgress(progress);
}
}
nsxlog(Level::Info, "ShapeModel: finished computing shapes");
nsxlog(Level::Info, "ShapeModel: finished computing shapes, ", n_bad_shapes, " failures");
}
std::array<double, 6> ShapeModel::choleskyD() const
......@@ -487,6 +491,9 @@ void ShapeModel::integrate(
integrator.setParameters(int_params);
int n_numor = 1;
// Why loop over all numors? - zamaan
// Do we expect to have peaks from different numors in the vector? If so,
// how do we ensure that the sample rotation angles are consistent?
for (const nsx::sptrDataSet& data : datalist) {
integrator.integrate(peaks, this, data, n_numor);
++n_numor;
......@@ -494,6 +501,45 @@ void ShapeModel::integrate(
nsxlog(Level::Info, "ShapeModel::integrate: finished integrating shapes");
}
//! Build a shape model from a found-peak collection by integrating the
//! strong, well-resolved peaks of a single data set.
//! \param peaks collection of found peaks used to fit the model
//! \param data  the data set the peaks were found in
void ShapeModel::build(PeakCollection* peaks, sptrDataSet data)
{
    // Passed to updateFit below; presumably the maximum number of fit
    // iterations — TODO confirm against updateFit's documentation.
    constexpr int kFitIterations = 1000;

    nsxlog(Level::Info, "ShapeModel::build: building shape model");

    // Beam divergence / mosaicity estimates come from the collection itself.
    peaks->computeSigmas();
    _params->sigma_d = peaks->sigmaD();
    _params->sigma_m = peaks->sigmaM();

    // Select fit peaks: enabled, inside the resolution (d) window, and with
    // intensity above the strength threshold (I > strength_min * sigma).
    std::vector<nsx::Peak3D*> fit_peaks;
    for (nsx::Peak3D* peak : peaks->getPeakList()) {
        if (!peak->enabled())
            continue;
        // d-spacing from |q|; a zero-norm q yields d = inf, which the d_max
        // test below rejects.
        const double d = 1.0 / peak->q().rowVector().norm();
        if (d > _params->d_max || d < _params->d_min)
            continue;
        const nsx::Intensity intensity = peak->correctedIntensity();
        if (intensity.value() <= _params->strength_min * intensity.sigma())
            continue;
        fit_peaks.push_back(peak);
    }

    nsxlog(Level::Info, "ShapeModel::build: integrating ", fit_peaks.size(), " peaks");
    ShapeIntegrator integrator(
        this, getAABB(), _params->nbins_x, _params->nbins_y, _params->nbins_z);

    // Only the peak/background radii are taken from the shape parameters;
    // all other integration parameters keep their defaults.
    nsx::IntegrationParameters int_params{};
    int_params.peak_end = _params->peak_end;
    int_params.bkg_begin = _params->bkg_begin;
    int_params.bkg_end = _params->bkg_end;

    integrator.setNNumors(1);
    integrator.setParameters(int_params);
    integrator.integrate(fit_peaks, this, data, 1);
    nsxlog(Level::Info, "ShapeModel::build: finished integrating shapes");

    nsxlog(Level::Info, "ShapeModel::build: updating fit");
    updateFit(kFitIterations);
    nsxlog(Level::Info, "ShapeModel::build: done");
}
void ShapeModel::setHandler(sptrProgressHandler handler)
{
_handler = handler;
......
......@@ -16,6 +16,7 @@
#define NSX_CORE_SHAPE_SHAPECOLLECTION_H
#include "base/utils/ProgressHandler.h"
#include "core/data/DataTypes.h"
#include "core/shape/IPeakIntegrator.h"
#include "core/shape/Profile1D.h"
#include "core/shape/Profile3D.h"
......@@ -94,7 +95,7 @@ class ShapeModel {
void setParameters(std::shared_ptr<ShapeModelParameters> params);
//! Set shapes of a predicted peak collection
void setPredictedShapes(PeakCollection* peaks, PeakInterpolation interpolation);
void setPredictedShapes(PeakCollection* peaks);
//! Predict the (detector space) covariance of a given peak
Eigen::Matrix3d predictCovariance(Peak3D* peak) const;
......@@ -148,6 +149,9 @@ class ShapeModel {
std::vector<Peak3D*> peaks, std::set<nsx::sptrDataSet> datalist,
sptrProgressHandler handler = nullptr);
//! Build a shape model from the given peak collection
void build(PeakCollection* peaks, sptrDataSet data);
//! Set the progress handler
void setHandler(sptrProgressHandler handler);
......
......@@ -136,4 +136,12 @@ const DataResolution* PeakMerger::overallQuality()
return &_overall_quality;
}
//! Return a human-readable summary of the merge statistics: one line per
//! resolution shell, as produced by each shell's toString().
std::string PeakMerger::summary()
{
    std::ostringstream oss;
    for (const auto& shell : _shell_qualities.shells)
        oss << shell.toString() << '\n'; // '\n', not std::endl: flushing a stringstream is a no-op
    return oss.str();
}
} // namespace nsx
......@@ -74,6 +74,9 @@ class PeakMerger {
//! Get the overall quality statistics
const DataResolution* overallQuality();
//! Return a string containing a summary of statistics
std::string summary();
private:
std::unique_ptr<MergedData> _merged_data;
std::vector<std::unique_ptr<MergedData>> _merged_data_per_shell;
......
......@@ -599,7 +599,7 @@ void SubframePredictPeaks::applyShapeModel()
shapes->setParameters(_shape_params);
shapes->setHandler(handler);
shapes->setPredictedShapes(&_peak_collection, _shape_params->interpolation);
shapes->setPredictedShapes(&_peak_collection);
refreshPeakTable();
_shapes_assigned = true;
......
......@@ -590,13 +590,10 @@ void SubframeShapes::assignPeakShapes()
ProgressView progressView(nullptr);
progressView.watch(handler);
int interpol = _interpolation_combo->currentIndex();
nsx::PeakInterpolation peak_interpolation = static_cast<nsx::PeakInterpolation>(interpol);
nsx::PeakCollection* peaks = _predicted_combo->currentPeakCollection();
_shape_combo->currentShapes()->setHandler(handler);
_shape_combo->currentShapes()->setPredictedShapes(peaks, peak_interpolation);
_shape_combo->currentShapes()->setPredictedShapes(peaks);
gGui->statusBar()->showMessage(
QString::number(peaks->numberOfValid()) + "/" + QString::number(peaks->numberOfPeaks())
+ " predicted peaks with valid shapes");
......
......@@ -5,5 +5,8 @@
if(NSX_PYTHON)
configure_file(${CMAKE_SOURCE_DIR}/scripts/workflow.py workflow.py)
configure_file(${CMAKE_SOURCE_DIR}/scripts/integrate.py integrate.py)
configure_file(${CMAKE_SOURCE_DIR}/scripts/pxrange_test.py pxrange_test.py)
configure_file(${CMAKE_SOURCE_DIR}/scripts/frange_test.py frange_test.py)
configure_file(${CMAKE_SOURCE_DIR}/scripts/peakend_test.py peakend_test.py)
configure_file(${CMAKE_SOURCE_DIR}/scripts/qspace-ellipsoid.py qspace-ellipsoid.py)
endif()
#!/usr/bin/python
## ***********************************************************************************************
##
## NSXTool: data reduction for neutron single-crystal diffraction
##
##! @file test/python/TestWorkFlow.py
##! @brief Sweep the shape-model frame search range and record merge statistics
##!
##! @homepage ###HOMEPAGE###
##! @license GNU General Public License v3 or higher (see COPYING)
##! @copyright Institut Laue-Langevin and Forschungszentrum Jülich GmbH 2016-
##! @authors see CITATION, MAINTAINER
##
## ***********************************************************************************************
# NOTE(review): the @file tag above still names TestWorkFlow.py, but the CMake
# rules configure this script as frange_test.py — confirm and update.
# NOTE(review): leading indentation appears to have been stripped by the page
# this source was captured from; the statements following the `for f_range`
# line below are presumably the loop body — confirm against the original file.
import sys
import numpy as np
from pathlib import Path
lib_dir = "@SWIG_INSTALL_PATH@" # Path to pynsx.py
sys.path.append(lib_dir)
import pynsx as nsx
# Hard-coded input data set and experiment metadata (developer's local path).
file = Path('/home/zamaan/projects/datasets/120522b_after_integrate_pxsum336.nsx')
experiment = '120522b'
diffractometer = 'BioDiff2500'
data_name = 'soak_9_d2_I_scanI_9483.raw'
space_group = "P 21 21 21"
found_peaks_name = 'findint'
predicted_peaks_name = 'predict1'
# Load the saved experiment and fetch the found/predicted peak collections.
expt = nsx.Experiment(experiment, diffractometer)
expt.loadFromFile(str(file))
data = expt.getData(data_name)
found_peaks = expt.getPeakCollection(found_peaks_name)
predicted_peaks = expt.getPeakCollection(predicted_peaks_name)
# Build a shape model once from the found peaks; only the neighbour-search
# parameters vary per iteration below.
expt.addEmptyShapeModel("shapes")
shapes = expt.getShapeModel("shapes")
shapes.build(found_peaks, data)
shape_params = shapes.parameters()
# Sweep the frame search range (4..16 in steps of 2); each iteration assigns
# shapes, integrates, merges, and writes the merge statistics to a file named
# "frange_<n>".
for f_range in range(4, 18, 2):
name = "frange_" + str(f_range);
print(name)
shape_params.neighbour_range_pixels = 400
shape_params.neighbour_range_frames = f_range
shape_params.min_neighbours = 10
shape_params.interpolation = nsx.PeakInterpolation_InverseDistance
shapes.setPredictedShapes(predicted_peaks)
# Integration parameters
integration_params = expt.integrator().parameters()
integration_params.integrator_type = nsx.IntegratorType_PixelSum
integrator = expt.integrator()
integrator.integratePeaks(data, predicted_peaks, integration_params, shapes)
# Merge parameters
merger = expt.peakMerger()
params = merger.parameters()
merger.reset()
params.d_min = 1.5
merger.addPeakCollection(predicted_peaks)
merger.setSpaceGroup(nsx.SpaceGroup(space_group))
merger.mergePeaks()
merger.computeQuality()
# One statistics file per frame range, consumed by the plotting script.
with open(name, "w") as outfile:
outfile.write(merger.summary())
print(merger.summary())
......@@ -140,6 +140,6 @@ shapes = found_peaks.shapeCollection()
# Assign shapes to predicted peaks
interpolation = nsx.PeakInterpolation_InverseDistance
shapes.setPredictedShapes(predicted_peaks, interpolation)
shapes.setPredictedShapes(predicted_peaks)
expt.saveToFile(f'{name}.nsx')
......@@ -22,55 +22,40 @@ lib_dir = "@SWIG_INSTALL_PATH@" # Path to pynsx.py
sys.path.append(lib_dir)
import pynsx as nsx
file = Path('/home/zamaan/projects/250422b_trypsin.nsx')
experiment = '250422b'
file = Path('/home/zamaan/projects/datasets/120522b_after_integrate_pxsum336.nsx')
experiment = '120522b'
diffractometer = 'BioDiff2500'
data_name = 'soak_9_d2_I_scanI_9483.raw'
space_group = "P 21 21 21"
found_peaks_name = 'findint'
predicted_peaks_name = 'predict1'
if (file.is_file()):
print('Found .nsx file ' + str(file))
else:
print('Could not find file ' + str(file))
expt = nsx.Experiment(experiment, diffractometer)
expt.loadFromFile(str(file))
data = expt.getData(data_name)
found_peaks = expt.getPeakCollection(found_peaks_name)
predicted_peaks = expt.getPeakCollection(predicted_peaks_name)
def get_shapes(data, peaks, params):
peaks.computeSigmas()
params.sigma_d = peaks.sigmaD()
params.sigma_m = peaks.sigmaM()
peaks.buildShapeModel(data, params)
print(f'{peaks.shapeCollection().numberOfPeaks()} shapes generated')
return peaks.shapeCollection()
def integrate(integrator_type, data, peaks, shapes, params):
integrator = expt.integrator()
integrator.integratePeaks(integrator_type, data, peaks)
print(f'{integrator.numberOfValidPeaks()} / {integrator.numberOfPeaks()} peaks integrated')
print('Generating shapes...')
params = nsx.ShapeModelParameters()
shapes = get_shapes(data, found_peaks, params)
expt.addEmptyShapeModel("shapes")
shapes = expt.getShapeModel("shapes")
shapes.build(found_peaks, data)
shape_params = shapes.parameters()
print('Assigning peak shapes...')
params.neighbour_range_pixels = 500
params.neighbour_range_frames = 10
params.min_neighbours = 10
interpolation_type = nsx.PeakInterpolation_NoInterpolation
# found_peaks.shapeCollection().setPredictedShapes(predicted_peaks, interpolation_type)
shape_params.neighbour_range_pixels = 500
shape_params.neighbour_range_frames = 10
shape_params.min_neighbours = 10
shape_params.interpolation = nsx.PeakInterpolation_InverseDistance
shapes.setPredictedShapes(predicted_peaks)
print('Integrating predicted peaks...')
# Integration parameters
params = expt.integrator().parameters()
integrator_type = nsx.IntegratorType_Profile3D
integrate(integrator_type, data, predicted_peaks, shapes, params)
integration_params = expt.integrator().parameters()
integration_params.integrator_type = nsx.IntegratorType_Profile3D
integrator = expt.integrator()
integrator.integratePeaks(data, predicted_peaks, integration_params, shapes)
print(f'{integrator.numberOfValidPeaks()} / {integrator.numberOfPeaks()} peaks integrated')
print('Merging predicted peaks...')
......@@ -79,10 +64,10 @@ merger = expt.peakMerger()
params = merger.parameters()
merger.reset()
params.d_min = 1.5
params.frame_min = 1 # exclude first and last frames from statistics
params.frame_max = data.nFrames() - 1
merger.addPeakCollection(predicted_peaks)
merger.setSpaceGroup(nsx.SpaceGroup(space_group))
merger.mergePeaks()
merger.computeQuality()
print(merger.summary())
print("Integration complete")
#!/usr/bin/python
## ***********************************************************************************************
##
## NSXTool: data reduction for neutron single-crystal diffraction
##
##! @file test/python/TestWorkFlow.py
##! @brief Sweep the pixel-sum peak-end radius and record merge statistics
##!
##! @homepage ###HOMEPAGE###
##! @license GNU General Public License v3 or higher (see COPYING)
##! @copyright Institut Laue-Langevin and Forschungszentrum Jülich GmbH 2016-
##! @authors see CITATION, MAINTAINER
##
## ***********************************************************************************************
# NOTE(review): the @file tag above still names TestWorkFlow.py, but the CMake
# rules configure this script as peakend_test.py — confirm and update.
# NOTE(review): leading indentation appears to have been stripped by the page
# this source was captured from; the statements following the `for peak_end`
# line below are presumably the loop body — confirm against the original file.
import sys
import numpy as np
from pathlib import Path
lib_dir = "@SWIG_INSTALL_PATH@" # Path to pynsx.py
sys.path.append(lib_dir)
import pynsx as nsx
# Hard-coded input data set and experiment metadata (developer's local path).
file = Path('/home/zamaan/projects/datasets/120522b_after_integrate_pxsum336.nsx')
experiment = '120522b'
diffractometer = 'BioDiff2500'
data_name = 'soak_9_d2_I_scanI_9483.raw'
space_group = "P 21 21 21"
found_peaks_name = 'findint'
predicted_peaks_name = 'predict1'
# Load the saved experiment and fetch the found/predicted peak collections.
expt = nsx.Experiment(experiment, diffractometer)
expt.loadFromFile(str(file))
data = expt.getData(data_name)
found_peaks = expt.getPeakCollection(found_peaks_name)
predicted_peaks = expt.getPeakCollection(predicted_peaks_name)
# Build a shape model once from the found peaks; only the integration radii
# vary per iteration below.
expt.addEmptyShapeModel("shapes")
shapes = expt.getShapeModel("shapes")
shapes.build(found_peaks, data)
shape_params = shapes.parameters()
# Sweep the peak-end radius (2.5..3.5 in steps of 0.1). Each iteration works
# on a fresh clone of the predicted peaks so runs don't contaminate each
# other, then writes merge statistics to a file named "peakend_<r>".
for peak_end in np.arange(2.5, 3.6, 0.1):
name = "peakend_" + str(peak_end);
expt.clonePeakCollection('predict1', name)
peaks = expt.getPeakCollection(name)
print(name)
shape_params.neighbour_range_pixels = 500
shape_params.neighbour_range_frames = 10
shape_params.min_neighbours = 10
shape_params.interpolation = nsx.PeakInterpolation_InverseDistance
shapes.setPredictedShapes(peaks)
# Integration parameters
integration_params = expt.integrator().parameters()
integration_params.integrator_type = nsx.IntegratorType_PixelSum
integration_params.peak_end = peak_end
integration_params.bkg_begin = peak_end
integrator = expt.integrator()
integrator.integratePeaks(data, peaks, integration_params, shapes)
# Merge parameters
merger = expt.peakMerger()
params = merger.parameters()
merger.reset()
params.d_min = 1.5
merger.addPeakCollection(peaks)
merger.setSpaceGroup(nsx.SpaceGroup(space_group))
merger.mergePeaks()
merger.computeQuality()
# One statistics file per peak-end radius, consumed by the plotting script.
with open(name, "w") as outfile:
outfile.write(merger.summary())
print(merger.summary())
#!/usr/bin/python
# Plot Rmerge per resolution shell, one curve per "frange_<n>" statistics
# file produced by the frame-range sweep script.
import matplotlib.pyplot as plt
import numpy as np
import pathlib

plt.xlabel("Resolution shell")
plt.ylabel("Rmerge")

for stats_file in sorted(pathlib.Path('.').glob("frange_*")):
    # The frame range is encoded in the file name after the underscore.
    frame_range = int(str(stats_file).split("_")[1])
    rmerge = []
    with open(stats_file, "r") as infile:
        for line in infile.readlines():
            # Rmerge is the fifth whitespace-separated column of each shell line.
            rmerge.append(float(line.split()[4]))
    plt.plot(rmerge, label=str(frame_range), linewidth=0.5)

plt.yticks(np.arange(0, 1, 0.1))
plt.ylim(0.1, 0.6)
plt.legend()
plt.savefig("trypsin_profile3d_frange.pdf")
#!/usr/bin/python
# Plot Rmerge per resolution shell, one curve per "peakend_<r>" statistics
# file produced by the peak-end sweep script.
import matplotlib.pyplot as plt
import numpy as np
import pathlib

plt.xlabel("Resolution shell")
plt.ylabel("Rmerge")

for stats_file in sorted(pathlib.Path('.').glob("peakend*")):
    # The peak-end radius is encoded in the file name after the underscore.
    end_radius = float(str(stats_file).split("_")[1])
    rmerge = []
    with open(stats_file, "r") as infile:
        for line in infile.readlines():
            # Rmerge is the fifth whitespace-separated column of each shell line.
            rmerge.append(float(line.split()[4]))
    plt.plot(rmerge, label=str(end_radius), linewidth=0.5)

plt.yticks(np.arange(0, 1, 0.1))
plt.ylim(0.0, 0.6)
plt.legend()
plt.savefig("trypsin_pxsum_peakend.pdf")
#!/usr/bin/python
# Plot Rmerge per resolution shell, one curve per "pxrange*" statistics file
# produced by the pixel-range sweep script.
import matplotlib.pyplot as plt
import numpy as np
import pathlib

plt.xlabel("Resolution shell")
plt.ylabel("Rmerge")

for stats_file in sorted(pathlib.Path('.').glob("pxrange*")):
    # The pixel range is the last three characters of the file name.
    pixel_range = int(str(stats_file)[-3:])
    rmerge = []
    with open(stats_file, "r") as infile:
        for line in infile.readlines():
            # Rmerge is the fifth whitespace-separated column of each shell line.
            rmerge.append(float(line.split()[4]))
    plt.plot(rmerge, label=str(pixel_range), linewidth=0.5)

plt.yticks(np.arange(0, 1, 0.1))
plt.ylim(0.1, 0.6)
plt.legend()
plt.savefig("trypsin_profile3d_pxrange.pdf")
#!/usr/bin/python
## ***********************************************************************************************
##
## NSXTool: data reduction for neutron single-crystal diffraction
##
##! @file test/python/TestWorkFlow.py
##! @brief Test ...
##!
##! @homepage ###HOMEPAGE###
##! @license GNU General Public License v3 or higher (see COPYING)
##! @copyright Institut Laue-Langevin and Forschungszentrum Jülich GmbH 2016-
##! @authors see CITATION, MAINTAINER
##
## ***********************************************************************************************