/*
* Project: MoleCuilder
* Description: creates and alters molecular systems
* Copyright (C) 2010-2012 University of Bonn. All rights reserved.
*
*
* This file is part of MoleCuilder.
*
* MoleCuilder is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 2 of the License, or
* (at your option) any later version.
*
* MoleCuilder is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with MoleCuilder. If not, see <http://www.gnu.org/licenses/>.
*/
/*
* FragmentationAutomationAction.cpp
*
* Created on: May 18, 2012
* Author: heber
*/
// include config.h
#ifdef HAVE_CONFIG_H
#include <config.h>
#endif
#include <boost/archive/text_iarchive.hpp>
// boost asio needs specific operator new
#include <boost/asio.hpp>
#include "CodePatterns/MemDebug.hpp"
#include <boost/mpl/for_each.hpp>
#include <boost/mpl/remove.hpp>
#include "CodePatterns/Assert.hpp"
#include "CodePatterns/Info.hpp"
#include "CodePatterns/Log.hpp"
#include "JobMarket/Controller/FragmentController.hpp"
#include "JobMarket/Jobs/FragmentJob.hpp"
#include "Atom/atom.hpp"
#include "Box.hpp"
#include "Element/element.hpp"
#include "Fragmentation/EnergyMatrix.hpp"
#include "Fragmentation/ForceMatrix.hpp"
#include "Fragmentation/Fragmentation.hpp"
#include "Fragmentation/SetValues/Fragment.hpp"
#include "Fragmentation/SetValues/Histogram.hpp"
#include "Fragmentation/SetValues/IndexedVectors.hpp"
#include "Fragmentation/HydrogenSaturation_enum.hpp"
#include "Fragmentation/KeySet.hpp"
#include "Fragmentation/KeySetsContainer.hpp"
#include "Fragmentation/Summation/OrthogonalSumUpPerLevel.hpp"
#include "Fragmentation/Summation/SumUpPerLevel.hpp"
#include "Fragmentation/Summation/OrthogonalFullSummator.hpp"
#include "Fragmentation/Summation/OrthogonalSummation.hpp"
#include "Fragmentation/Summation/writeTable.hpp"
#include "Graph/DepthFirstSearchAnalysis.hpp"
#include "Helpers/defs.hpp"
#include "Jobs/MPQCJob.hpp"
#include "Jobs/MPQCData.hpp"
#include "Jobs/MPQCData_printKeyNames.hpp"
#include "Jobs/Grid/SamplingGrid.hpp"
#include "LinearAlgebra/RealSpaceMatrix.hpp"
#ifdef HAVE_VMG
#include "Jobs/VMGJob.hpp"
#include "Jobs/VMGData.hpp"
#include "Jobs/VMGDataFused.hpp"
#include "Jobs/VMGDataMap.hpp"
#include "Jobs/VMGData_printKeyNames.hpp"
#endif
#include "molecule.hpp"
#include "World.hpp"
#include <algorithm>
#include <boost/bind.hpp>
#include <boost/filesystem.hpp>
#include <fstream>
#include <map>
#include <sstream>
#include <string>
#include <vector>
#include "Actions/FragmentationAction/FragmentationAutomationAction.hpp"
using namespace MoleCuilder;
// and construct the stuff
#include "FragmentationAutomationAction.def"
#include "Action_impl_pre.hpp"
/** =========== define the function ====================== */
class controller_AddOn;
// needs to be defined for using the FragmentController
controller_AddOn *getAddOn()
{
return NULL;
}
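//!> level handed to each created MPQCJob along with the domain, presumably setting the sampling grid resolution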
const int LEVEL = 5;
/** Creates an MPQCCommandJob with argument \a filename.
*
* @param jobs created job is added to this vector
* @param command mpqc command to execute
* @param filename filename being argument to job
* @param nextid id for this job
*/
void parsejob(
std::vector<FragmentJob::ptr> &jobs,
const std::string &command,
const std::string &filename,
const JobId_t nextid)
{
std::ifstream file;
file.open(filename.c_str());
ASSERT( file.good(), "parsejob() - file "+filename+" does not exist.");
std::string output((std::istreambuf_iterator<char>(file)),
std::istreambuf_iterator<char>());
double begin[NDIM] = { 0., 0., 0. };
const RealSpaceMatrix& M = World::getInstance().getDomain().getM();
const double size = M.at(0,0);
ASSERT( M.determinant() == size*size*size,
"parsejob() - current domain matrix "+toString(M)+" is not cubic.");
const int level = LEVEL;
FragmentJob::ptr testJob( new MPQCJob(nextid, output, begin, size, level) );
jobs.push_back(testJob);
file.close();
LOG(1, "INFO: Added MPQCCommandJob from file "+filename+".");
}
/** Helper function to obtain the number of atoms.
*
* Here, we simply count the lines in the adjacency file, as their number
* should correspond to the number of atoms, except when some atoms are
* not bonded, but then fragmentation makes no sense anyway.
*
* @param path path prefix to the adjacency file
* @return number of atoms, 0 if the file could not be opened
*/
size_t getNoAtomsFromAdjacencyFile(const std::string &path)
{
size_t NoAtoms = 0;
// parse in special file to get atom count (from line count)
std::string filename(path);
filename += FRAGMENTPREFIX;
filename += ADJACENCYFILE;
std::ifstream adjacency(filename.c_str());
if (adjacency.fail()) {
LOG(0, std::endl << "getNoAtomsFromAdjacencyFile() - Unable to open " << filename << ", is the directory correct?");
return 0;
}
std::string buffer;
while (getline(adjacency, buffer))
NoAtoms++;
LOG(1, "INFO: There are " << NoAtoms << " atoms.");
return NoAtoms;
}
/** Extracts data of type \a T from a received vector of FragmentResults.
*
* @param results results to extract the serialized data from
* @param fragmentData on return array filled with extracted data
*/
template <typename T>
void ConvertFragmentResultTo(
const std::vector<FragmentResult::ptr> &results,
std::vector<T> &fragmentData)
{
// extract results
fragmentData.clear();
fragmentData.reserve(results.size());
LOG(2, "DEBUG: Parsing now through " << results.size() << " results.");
for (std::vector<FragmentResult::ptr>::const_iterator iter = results.begin();
iter != results.end(); ++iter) {
//LOG(1, "RESULT: job #"+toString((*iter)->getId())+": "+toString((*iter)->result));
T extractedData;
std::stringstream inputstream((*iter)->result);
LOG(2, "DEBUG: First 50 characters FragmentResult's string: "+(*iter)->result.substr(0, 50));
boost::archive::text_iarchive ia(inputstream);
ia >> extractedData;
LOG(1, "INFO: extracted data is " << extractedData << ".");
fragmentData.push_back(extractedData);
}
ASSERT( results.size() == fragmentData.size(),
"ConvertFragmentResultTo() - the number of extracted data differs from the number of results.");
}
/** Creates a lookup from FragmentJob::id to the true fragment number.
*
* @param jobids vector with job ids
* @param MatrixNrLookup lookup map from job id to fragment number, filled on return
* @param FragmentCounter total number of fragments on return
*/
void createMatrixNrLookup(
const std::vector<JobId_t> &jobids,
std::map< JobId_t, size_t > &MatrixNrLookup,
size_t &FragmentCounter)
{
// align fragments
MatrixNrLookup.clear();
FragmentCounter = 0;
for (std::vector<JobId_t>::const_iterator iter = jobids.begin();
iter != jobids.end(); ++iter) {
#ifndef NDEBUG
std::pair< std::map< JobId_t, size_t >::iterator, bool> inserter =
#endif
MatrixNrLookup.insert( std::make_pair(*iter, FragmentCounter++) );
ASSERT( inserter.second,
"createMatrixNrLookup() - two results have same id "
+toString(*iter)+".");
}
LOG(1, "INFO: There are " << FragmentCounter << " fragments.");
}
/** Place results from FragmentResult into EnergyMatrix and ForceMatrix.
*
* @param jobids jobids with ids to associate with fragment number
* @param fragmentData MPQCData resulting from the jobs
* @param MatrixNrLookup lookup map from job id to fragment number
* @param FragmentCounter total number of fragments
* @param NoAtoms total number of atoms
* @param Energy energy matrix to be filled on return
* @param Force force matrix to be filled on return
* @return true - everything ok, false - else
*/
bool putResultsintoMatrices(
const std::vector<JobId_t> &jobids,
const std::vector<MPQCData> &fragmentData,
std::map< JobId_t, size_t > &MatrixNrLookup,
const size_t FragmentCounter,
const size_t NoAtoms,
EnergyMatrix &Energy,
ForceMatrix &Force)
{
ASSERT( jobids.size() == fragmentData.size(),
"putResultsintoMatrices() - jobids and fragmentData differ in size.");
std::vector<MPQCData>::const_iterator dataiter = fragmentData.begin();
std::vector<JobId_t>::const_iterator iditer = jobids.begin();
for (; dataiter != fragmentData.end(); ++dataiter, ++iditer) {
const MPQCData &extractedData = *dataiter;
// place results into EnergyMatrix ...
{
MatrixContainer::MatrixArray matrix;
matrix.resize(1);
matrix[0].resize(1, extractedData.energies.total);
if (!Energy.AddMatrix(
std::string("MPQCJob ")+toString(*iditer),
matrix,
MatrixNrLookup[*iditer])) {
ELOG(1, "Adding energy matrix failed.");
return false;
}
}
// ... and ForceMatrix (with two empty columns in front)
{
MatrixContainer::MatrixArray matrix;
const size_t rows = extractedData.forces.size();
matrix.resize(rows);
for (size_t i=0;i<rows;++i) {
// two empty columns in front, then the force components of this atom
const size_t columns = extractedData.forces[i].size();
matrix[i].resize(columns+2, 0.);
for (size_t j=0;j<columns;++j)
matrix[i][j+2] = extractedData.forces[i][j];
}
if (!Force.AddMatrix(
std::string("MPQCJob ")+toString(*iditer),
matrix,
MatrixNrLookup[*iditer])) {
ELOG(1, "Adding force matrix failed.");
return false;
}
}
}
return true;
}
/** Sums up the sampled charge density and fragment information from the results.
*
* @param jobids job ids to associate data with fragment numbers
* @param fragmentData MPQCData resulting from the jobs
* @param KeySetFilename path prefix to the keyset files
* @param full_sample summed up charge density of all fragments on return
* @param full_fragment summed up fragment (positions and charges) on return
* @return true - everything ok, false - else
*/
bool sumUpChargeDensity(
const std::vector<JobId_t> &jobids,
const std::vector<MPQCData> &fragmentData,
const std::string &KeySetFilename,
SamplingGrid &full_sample,
Fragment &full_fragment)
{
// create lookup from job nr to fragment number
std::map< JobId_t, size_t > MatrixNrLookup;
size_t FragmentCounter = 0;
createMatrixNrLookup(jobids, MatrixNrLookup, FragmentCounter);
// initialise keysets
KeySetsContainer KeySet;
{
// else needs keysets without hydrogens
std::stringstream filename;
filename << FRAGMENTPREFIX << KEYSETFILE;
if (!KeySet.ParseKeySets(KeySetFilename, filename.str(), FragmentCounter)) return false;
}
/// prepare for OrthogonalSummation
// convert KeySetContainer to IndexSetContainer
IndexSetContainer::ptr container(new IndexSetContainer(KeySet));
// create the map of all keysets
SubsetMap::ptr subsetmap(new SubsetMap(*container));
/// convert all MPQCData to MPQCDataMap_t
std::vector<MPQCDataGridMap_t> Result_Grid_fused(
OrthogonalSumUpPerLevel<MPQCDataGridMap_t, MPQCDataGridVector_t>(
fragmentData, jobids, MatrixNrLookup, container, subsetmap));
std::vector<MPQCDataFragmentMap_t> Result_Fragment_fused(
OrthogonalSumUpPerLevel<MPQCDataFragmentMap_t, MPQCDataFragmentVector_t>(
fragmentData, jobids, MatrixNrLookup, container, subsetmap));
// obtain full grid and full fragment from the topmost summation level
full_sample = boost::fusion::at_key<MPQCDataFused::sampled_grid>(Result_Grid_fused.back());
full_fragment = boost::fusion::at_key<MPQCDataFused::fragment>(Result_Fragment_fused.back());
return true;
}
/** Print MPQCData from received results.
*
* @param jobids job ids to associate with fragment numbers
* @param fragmentData MPQCData resulting from the jobs
* @param KeySetFilename filename with keysets to associate forces correctly
* @param NoAtoms total number of atoms
* @param full_sample summed up charge from fragments on return
*/
bool printReceivedMPQCResults(
const std::vector<JobId_t> &jobids,
const std::vector<MPQCData> &fragmentData,
const std::string &KeySetFilename,
size_t NoAtoms,
SamplingGrid &full_sample)
{
// create lookup from job nr to fragment number
std::map< JobId_t, size_t > MatrixNrLookup;
size_t FragmentCounter = 0;
createMatrixNrLookup(jobids, MatrixNrLookup, FragmentCounter);
// place results into maps
EnergyMatrix Energy;
ForceMatrix Force;
if (!putResultsintoMatrices(jobids, fragmentData, MatrixNrLookup, FragmentCounter, NoAtoms, Energy, Force))
return false;
// initialise keysets
KeySetsContainer KeySet;
KeySetsContainer ForceKeySet;
if (!Energy.InitialiseIndices()) return false;
if (!Force.ParseIndices(KeySetFilename.c_str())) return false;
{
// else needs keysets without hydrogens
std::stringstream filename;
filename << FRAGMENTPREFIX << KEYSETFILE;
if (!KeySet.ParseKeySets(KeySetFilename, filename.str(), FragmentCounter)) return false;
}
{
// forces need keysets including hydrogens
std::stringstream filename;
filename << FRAGMENTPREFIX << FORCESFILE;
if (!ForceKeySet.ParseKeySets(KeySetFilename, filename.str(), FragmentCounter)) return false;
}
/// prepare for OrthogonalSummation
// convert KeySetContainer to IndexSetContainer
IndexSetContainer::ptr container(new IndexSetContainer(KeySet));
// create the map of all keysets
SubsetMap::ptr subsetmap(new SubsetMap(*container));
/// convert all MPQCData to MPQCDataMap_t
{
ASSERT( ForceKeySet.KeySets.size() == fragmentData.size(),
"FragmentationAutomationAction::performCall() - ForceKeySet's KeySets and fragmentData differ in size.");
typedef boost::mpl::remove<MPQCDataEnergyVector_t, MPQCDataFused::energy_eigenvalues>::type MPQCDataEnergyVector_noeigenvalues_t;
std::vector<MPQCDataEnergyMap_t> Result_Energy_fused(
OrthogonalSumUpPerLevel<MPQCDataEnergyMap_t, MPQCDataEnergyVector_noeigenvalues_t>(
fragmentData, jobids, MatrixNrLookup, container, subsetmap));
std::vector<MPQCDataGridMap_t> Result_Grid_fused(
OrthogonalSumUpPerLevel<MPQCDataGridMap_t, MPQCDataGridVector_t>(
fragmentData, jobids, MatrixNrLookup, container, subsetmap));
std::vector<MPQCDataTimeMap_t> Result_Time_fused(
SumUpPerLevel<MPQCDataTimeMap_t, MPQCDataTimeVector_t>(
fragmentData, jobids, MatrixNrLookup, container, subsetmap));
// force has extra converter
std::vector<MPQCDataForceMap_t> MPQCData_Force_fused;
convertMPQCDatatoForceMap(fragmentData, ForceKeySet, MPQCData_Force_fused);
std::vector<MPQCDataForceMap_t> Result_Force_fused(subsetmap->getMaximumSubsetLevel());
AllLevelOrthogonalSummator<MPQCDataForceMap_t> forceSummer(
subsetmap,
MPQCData_Force_fused,
jobids,
container->getContainer(),
MatrixNrLookup,
Result_Force_fused);
boost::mpl::for_each<MPQCDataForceVector_t>(boost::ref(forceSummer));
// obtain full grid
full_sample = boost::fusion::at_key<MPQCDataFused::sampled_grid>(Result_Grid_fused.back());
// print tables (without eigenvalues, they go extra)
const size_t MaxLevel = subsetmap->getMaximumSubsetLevel();
const std::string energyresult =
writeTable<MPQCDataEnergyMap_t, MPQCDataEnergyVector_noeigenvalues_t>()(
Result_Energy_fused, MaxLevel);
LOG(0, "Energy table is \n" << energyresult);
const std::string eigenvalueresult;
LOG(0, "Eigenvalue table is \n" << eigenvalueresult);
const std::string forceresult =
writeTable<MPQCDataForceMap_t, MPQCDataForceVector_t>()(
Result_Force_fused, MaxLevel);
LOG(0, "Force table is \n" << forceresult);
// we don't want to print grid to a table
// print times (without flops for now)
typedef boost::mpl::remove<MPQCDataTimeVector_t, MPQCDataFused::times_total_flops>::type MPQCDataTimeVector_noflops_t;
const std::string timesresult =
writeTable<MPQCDataTimeMap_t, MPQCDataTimeVector_noflops_t>()(
Result_Time_fused, MaxLevel);
LOG(0, "Times table is \n" << timesresult);
}
// combine all found data
if (!KeySet.ParseManyBodyTerms()) return false;
EnergyMatrix EnergyFragments;
ForceMatrix ForceFragments;
if (!EnergyFragments.AllocateMatrix(Energy.Header, Energy.MatrixCounter, Energy.RowCounter, Energy.ColumnCounter)) return false;
if (!ForceFragments.AllocateMatrix(Force.Header, Force.MatrixCounter, Force.RowCounter, Force.ColumnCounter)) return false;
if(!Energy.SetLastMatrix(0., 0)) return false;
if(!Force.SetLastMatrix(0., 2)) return false;
for (int BondOrder=0;BondOrder<KeySet.Order;BondOrder++) {
// --- sum up the energy and force contributions of this bond order ---
if (!Energy.SumSubManyBodyTerms(EnergyFragments, KeySet, BondOrder)) return false;
if (!Force.SumSubManyBodyTerms(ForceFragments, KeySet, BondOrder)) return false;
if (!Energy.SumSubEnergy(EnergyFragments, NULL, KeySet, BondOrder, true)) return false;
if (!Force.SumSubForces(ForceFragments, KeySet, BondOrder, 1.)) return false;
}
return true;
}
#ifdef HAVE_VMG
/** Print MPQCData and VMGData from received results.
*
* @param fragmentresults results of the short-range jobs
* @param longrangeresults results of the long-range jobs
* @param fragmentData MPQCData extracted from the short-range results
* @param longrangeData VMGData extracted from the long-range results, one per fragment
* @param fullsolutionData VMGData of the full, unfragmented long-range solution
* @param KeySetFilename filename with keysets to associate forces correctly
* @param NoAtoms total number of atoms
* @param full_sample summed up charge from fragments on return
*/
bool printReceivedFullResults(
const std::vector<FragmentResult::ptr> &fragmentresults,
const std::vector<FragmentResult::ptr> &longrangeresults,
const std::vector<MPQCData> &fragmentData,
const std::vector<VMGData> &longrangeData,
const VMGData &fullsolutionData,
const std::string &KeySetFilename,
size_t NoAtoms,
SamplingGrid &full_sample)
{
// create a vector of all job ids from short-range
std::vector<JobId_t> jobids(fragmentresults.size(), JobId::IllegalJob);
std::transform(fragmentresults.begin(), fragmentresults.end(), jobids.begin(),
boost::bind(&FragmentResult::getId,
boost::bind(&FragmentResult::ptr::operator->, _1)));
// create lookup from job nr to fragment number
std::map< JobId_t, size_t > MatrixNrLookup;
size_t FragmentCounter = 0;
createMatrixNrLookup(jobids, MatrixNrLookup, FragmentCounter);
// initialise keysets
KeySetsContainer KeySet;
KeySetsContainer ForceKeySet;
{
// else needs keysets without hydrogens
std::stringstream filename;
filename << FRAGMENTPREFIX << KEYSETFILE;
if (!KeySet.ParseKeySets(KeySetFilename, filename.str(), FragmentCounter)) return false;
}
{
// forces need keysets including hydrogens
std::stringstream filename;
filename << FRAGMENTPREFIX << FORCESFILE;
if (!ForceKeySet.ParseKeySets(KeySetFilename, filename.str(), FragmentCounter)) return false;
}
/// prepare for OrthogonalSummation
// convert KeySetContainer to IndexSetContainer
IndexSetContainer::ptr container(new IndexSetContainer(KeySet));
// create the map of all keysets
SubsetMap::ptr subsetmap(new SubsetMap(*container));
/// convert all MPQCData to MPQCDataMap_t
{
typedef boost::mpl::remove<MPQCDataEnergyVector_t, MPQCDataFused::energy_eigenvalues>::type MPQCDataEnergyVector_noeigenvalues_t;
std::vector<MPQCDataEnergyMap_t> Result_Energy_fused(
OrthogonalSumUpPerLevel<MPQCDataEnergyMap_t, MPQCDataEnergyVector_noeigenvalues_t>(
fragmentData, jobids, MatrixNrLookup, container, subsetmap));
std::vector<MPQCDataGridMap_t> Result_Grid_fused(
OrthogonalSumUpPerLevel<MPQCDataGridMap_t, MPQCDataGridVector_t>(
fragmentData, jobids, MatrixNrLookup, container, subsetmap));
std::vector<MPQCDataTimeMap_t> Result_Time_fused(
SumUpPerLevel<MPQCDataTimeMap_t, MPQCDataTimeVector_t>(
fragmentData, jobids, MatrixNrLookup, container, subsetmap));
std::vector<MPQCDataFragmentMap_t> Result_Fragment_fused(
OrthogonalSumUpPerLevel<MPQCDataFragmentMap_t, MPQCDataFragmentVector_t>(
fragmentData, jobids, MatrixNrLookup, container, subsetmap));
// force has extra converter
std::vector<MPQCDataForceMap_t> MPQCData_Force_fused;
convertMPQCDatatoForceMap(fragmentData, ForceKeySet, MPQCData_Force_fused);
std::vector<MPQCDataForceMap_t> Result_Force_fused(subsetmap->getMaximumSubsetLevel());
AllLevelOrthogonalSummator<MPQCDataForceMap_t> forceSummer(
subsetmap,
MPQCData_Force_fused,
jobids,
container->getContainer(),
MatrixNrLookup,
Result_Force_fused);
boost::mpl::for_each<MPQCDataForceVector_t>(boost::ref(forceSummer));
// obtain full grid
std::vector<VMGDataMap_t> VMGData_Potential_fused;
convertDataTo(longrangeData, VMGData_Potential_fused);
OrthogonalFullSummator<VMGDataMap_t, VMGDataFused::sampled_potential> potentialSummer(
subsetmap,
VMGData_Potential_fused,
jobids,
container->getContainer(),
MatrixNrLookup);
potentialSummer(subsetmap->getMaximumSubsetLevel());
OrthogonalFullSummator<VMGDataMap_t, VMGDataFused::energy_potential> epotentialSummer(
subsetmap,
VMGData_Potential_fused,
jobids,
container->getContainer(),
MatrixNrLookup);
epotentialSummer(subsetmap->getMaximumSubsetLevel());
SamplingGrid full_sample = fullsolutionData.sampled_potential;
LOG(0, "Remaining long-range energy from energy_potential is " << full_sample.integral()-epotentialSummer.getFullContribution() << ".");
full_sample -= potentialSummer.getFullContribution();
LOG(0, "Remaining long-range energy from potential integral is " << full_sample.integral() << ".");
OrthogonalFullSummator<VMGDataMap_t, VMGDataFused::energy_long> elongSummer(
subsetmap,
VMGData_Potential_fused,
jobids,
container->getContainer(),
MatrixNrLookup);
elongSummer(subsetmap->getMaximumSubsetLevel());
double e_long = fullsolutionData.e_long;
e_long -= elongSummer.getFullContribution();
LOG(0, "Remaining long-range energy is " << e_long << ".");
// print tables (without eigenvalues, they go extra)
const size_t MaxLevel = subsetmap->getMaximumSubsetLevel();
const std::string energyresult =
writeTable<MPQCDataEnergyMap_t, MPQCDataEnergyVector_noeigenvalues_t>()(
Result_Energy_fused, MaxLevel);
LOG(0, "Energy table is \n" << energyresult);
const std::string eigenvalueresult;
LOG(0, "Eigenvalue table is \n" << eigenvalueresult);
const std::string forceresult =
writeTable<MPQCDataForceMap_t, MPQCDataForceVector_t>()(
Result_Force_fused, MaxLevel);
LOG(0, "Force table is \n" << forceresult);
// we don't want to print grid to a table
// print times (without flops for now)
typedef boost::mpl::remove<MPQCDataTimeVector_t, MPQCDataFused::times_total_flops>::type MPQCDataTimeVector_noflops_t;
const std::string timesresult =
writeTable<MPQCDataTimeMap_t, MPQCDataTimeVector_noflops_t>()(
Result_Time_fused, MaxLevel);
LOG(0, "Times table is \n" << timesresult);
}
return true;
}
#endif
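/** Resets and runs the given io_service until all queued handlers have completed.
*
* @param io_service io_service to reset and run
* @param message info message printed while the service is running
*/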
void RunService(
boost::asio::io_service &io_service,
std::string message)
{
message = std::string("io_service: ") + message;
io_service.reset();
Info info(message.c_str());
io_service.run();
}
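/** Requests a set of available job ids from the server.
*
* @param controller FragmentController that issues the request
* @param params Action parameters containing host and port of the server
* @param numberjobs number of ids to request
*/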
void requestIds(
FragmentController &controller,
const FragmentationFragmentationAutomationAction::FragmentationFragmentationAutomationParameters ¶ms,
const size_t numberjobs)
{
controller.requestIds(params.host.get(), params.port.get(), numberjobs);
}
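/** Creates one MPQCJob per given job file and sends all jobs to the server.
*
* @param controller FragmentController to obtain ids from and to send jobs with
* @param params Action parameters containing host, port, and executable
* @param jobfiles files to parse, one job per file
* @return true - all files existed and the jobs were sent, false - else
*/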
bool createJobsFromFiles(
FragmentController &controller,
const FragmentationFragmentationAutomationAction::FragmentationFragmentationAutomationParameters ¶ms,
const std::vector< boost::filesystem::path > &jobfiles)
{
std::vector<FragmentJob::ptr> jobs;
for (std::vector< boost::filesystem::path >::const_iterator iter = jobfiles.begin();
iter != jobfiles.end(); ++iter) {
const std::string &filename = (*iter).string();
if (boost::filesystem::exists(filename)) {
const JobId_t next_id = controller.getAvailableId();
LOG(1, "INFO: Creating MPQCCommandJob with filename'"
+filename+"', and id "+toString(next_id)+".");
parsejob(jobs, params.executable.get().string(), filename, next_id);
} else {
ELOG(1, "Fragment job "+filename+" does not exist.");
return false;
}
}
controller.addJobs(jobs);
controller.sendJobs(params.host.get(), params.port.get());
return true;
}
#ifdef HAVE_VMG
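/** Creates the VMGJobs for the long-range part of the calculation.
*
* One job is created per fragment, whose short-range contribution is later
* subtracted from the full solution, and one job for the full domain with the
* charges of all atoms in the World.
*
* @param controller FragmentController to obtain ids from and to send jobs with
* @param params Action parameters containing host and port of the server
* @param fragmentData short-range results, one per fragment
* @param full_sampled_grid summed up charge density of all fragments
* @param full_fragment summed up fragment of all positions and charges (currently unused)
* @return true - jobs were created and sent, false - else
*/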
bool createLongRangeJobs(
FragmentController &controller,
const FragmentationFragmentationAutomationAction::FragmentationFragmentationAutomationParameters ¶ms,
const std::vector<MPQCData> &fragmentData,
const SamplingGrid &full_sampled_grid,
const Fragment &full_fragment)
{
std::vector<FragmentJob::ptr> jobs;
// add one job for each fragment as the short-range correction which we need
// to subtract from the obtained full potential to get the long-range part only
for (std::vector<MPQCData>::const_iterator iter = fragmentData.begin();
iter != fragmentData.end(); ++iter) {
const JobId_t next_id = controller.getAvailableId();
LOG(1, "INFO: Creating VMGJob with " << iter->sampled_grid.sampled_grid.size()
<< " gridpoints and " << iter->charges.size() << " particle charges.");
FragmentJob::ptr testJob(
new VMGJob(next_id, iter->sampled_grid, iter->positions, iter->charges) );
jobs.push_back(testJob);
}
{
const World::AtomComposite &atoms = World::getInstance().getAllAtoms();
std::vector< std::vector<double> > positions;
positions.reserve(atoms.size());
std::vector<double> charges;
charges.reserve(atoms.size());
std::vector<double> position(3, 0.);
for (World::AtomComposite::const_iterator iter = atoms.begin();
iter != atoms.end(); ++iter) {
const Vector &pos = (*iter)->getPosition();
for (size_t i=0;i<3;++i) position[i] = pos[i];
positions.push_back(position);
charges.push_back((double)((*iter)->getElement().getAtomicNumber()));
}
const JobId_t next_id = controller.getAvailableId();
LOG(1, "INFO: Creating full VMGJob with " << full_sampled_grid.sampled_grid.size()
<< " gridpoints and " << charges.size() << " particle charges.");
FragmentJob::ptr testJob(
new VMGJob(next_id, full_sampled_grid, positions, charges) );
jobs.push_back(testJob);
}
// then send jobs to controller
controller.addJobs(jobs);
controller.sendJobs(params.host.get(), params.port.get());
return true;
}
#endif
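/** Waits until the server has calculated all expected results.
*
* Polls the job status every 500ms until the number of calculated jobs
* matches \a NoExpectedResults.
*
* @param io_service io_service to run the status requests with
* @param controller FragmentController to check the job status with
* @param params Action parameters containing host and port of the server
* @param NoExpectedResults number of results to wait for
*/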
void WaitforResults(
boost::asio::io_service &io_service,
FragmentController &controller,
const FragmentationFragmentationAutomationAction::FragmentationFragmentationAutomationParameters ¶ms,
const size_t NoExpectedResults
)
{
size_t NoCalculatedResults = 0;
while (NoCalculatedResults != NoExpectedResults) {
// wait a bit
boost::asio::deadline_timer timer(io_service);
timer.expires_from_now(boost::posix_time::milliseconds(500));
timer.wait();
// then request status
controller.checkResults(params.host.get(), params.port.get());
RunService(io_service, "Checking on results");
const std::pair<size_t, size_t> JobStatus = controller.getJobStatus();
LOG(1, "INFO: #" << JobStatus.first << " are waiting in the queue and #" << JobStatus.second << " jobs are calculated so far.");
NoCalculatedResults = JobStatus.second;
}
}
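/** Performs the FragmentationAutomation action.
*
* Requests job ids, creates one MPQCJob per given job file, sends the jobs to
* the server, and waits for and receives the short-range results. If compiled
* with VMG support and DoLongrange is set, additional VMGJobs for the
* long-range contribution are created and processed. Finally, the received
* results are summed up and printed.
*/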
Action::state_ptr FragmentationFragmentationAutomationAction::performCall() {
boost::asio::io_service io_service;
FragmentController controller(io_service);
// TODO: Have io_service run in second thread and merge with current again eventually
// Phase One: obtain ids
std::vector< boost::filesystem::path > jobfiles = params.jobfiles.get();
requestIds(controller, params, jobfiles.size());
RunService(io_service, "Requesting ids");
// Phase Two: create and add MPQCJobs
if (!createJobsFromFiles(controller, params, jobfiles))
return Action::failure;
RunService(io_service, "Adding MPQCJobs");
// Phase Three: calculate result
WaitforResults(io_service, controller, params, jobfiles.size());
controller.receiveResults(params.host.get(), params.port.get());
RunService(io_service, "Requesting short-range results");
std::vector<FragmentResult::ptr> MPQCresults = controller.getReceivedResults();
std::vector<MPQCData> fragmentData;
ConvertFragmentResultTo(MPQCresults, fragmentData);
#ifdef HAVE_VMG
if (params.DoLongrange.get()) {
ASSERT( World::getInstance().getAllAtoms().size() != 0,
"FragmentationFragmentationAutomationAction::performCall() - please load the full molecule into the World before.");
// create a vector of all job ids
std::vector<JobId_t> jobids(MPQCresults.size(), JobId::IllegalJob);
std::transform(MPQCresults.begin(), MPQCresults.end(), jobids.begin(),
boost::bind(&FragmentResult::getId,
boost::bind(&FragmentResult::ptr::operator->, _1)));
// obtain combined charge density
LOG(1, "INFO: Parsing fragment files from " << params.path.get() << ".");
SamplingGrid full_sample;
Fragment full_fragment;
sumUpChargeDensity(
jobids,
fragmentData,
params.path.get(),
full_sample,
full_fragment);
// Phase Four: obtain more ids
requestIds(controller, params, fragmentData.size()+1);
RunService(io_service, "Requesting ids");
// Phase Five: create VMGJobs
if (!createLongRangeJobs(controller, params, fragmentData, full_sample, full_fragment))
return Action::failure;
RunService(io_service, "Adding VMGJobs");
// Phase Six: calculate result
WaitforResults(io_service, controller, params, fragmentData.size()+1);
controller.receiveResults(params.host.get(), params.port.get());
RunService(io_service, "Requesting long-range results");
std::vector<FragmentResult::ptr> VMGresults = controller.getReceivedResults();
ASSERT( MPQCresults.size()+1 == VMGresults.size(),
"FragmentationFragmentationAutomationAction::performCall() - number of MPQCresultd and VMGresults don't match.");
std::vector<VMGData> longrangeData;
ConvertFragmentResultTo(VMGresults, longrangeData);
// remove full solution from vector, has to be treated extra
VMGData fullsolutionData = longrangeData.back();
longrangeData.pop_back();
// Final phase: print result
{
LOG(1, "INFO: Parsing fragment files from " << params.path.get() << ".");
printReceivedFullResults(
MPQCresults,
VMGresults,
fragmentData,
longrangeData,
fullsolutionData,
params.path.get(),
getNoAtomsFromAdjacencyFile(params.path.get()),
full_sample);
}
}
#else
// Final phase: print result
{
// construct the job id vector and a dummy grid for the plain short-range printing
std::vector<JobId_t> jobids(MPQCresults.size(), JobId::IllegalJob);
std::transform(MPQCresults.begin(), MPQCresults.end(), jobids.begin(),
boost::bind(&FragmentResult::getId,
boost::bind(&FragmentResult::ptr::operator->, _1)));
SamplingGrid full_sample;
LOG(1, "INFO: Parsing fragment files from " << params.path.get() << ".");
printReceivedMPQCResults(
jobids,
fragmentData,
params.path.get(),
getNoAtomsFromAdjacencyFile(params.path.get()),
full_sample);
}
#endif
size_t Exitflag = controller.getExitflag();
return (Exitflag == 0) ? Action::success : Action::failure;
}
Action::state_ptr FragmentationFragmentationAutomationAction::performUndo(Action::state_ptr _state) {
return Action::success;
}
Action::state_ptr FragmentationFragmentationAutomationAction::performRedo(Action::state_ptr _state){
return Action::success;
}
bool FragmentationFragmentationAutomationAction::canUndo() {
return false;
}
bool FragmentationFragmentationAutomationAction::shouldUndo() {
return false;
}
/** =========== end of function ====================== */