Commit
[skip ci] Merge pull request #35 from rikigigi/devel
added option to dump the same binary trajectory after specifying a sta…
rikigigi authored Nov 1, 2023
2 parents 9084d94 + 0031fa9 commit b671ab4
Showing 16 changed files with 2,046 additions and 1,422 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/cmake.yml
@@ -103,5 +103,5 @@ jobs:
# See https://cmake.org/cmake/help/latest/manual/ctest.1.html for more detail
run: |
copy ${{github.workspace}}/build/${{env.BUILD_TYPE}}/pyanalisi* .
pip install pytest pytest-regressions pandas matplotlib numpy scipy testbook k3d
pip install pytest pytest-regressions pandas matplotlib numpy scipy testbook k3d ipykernel
pytest -sv .
16 changes: 13 additions & 3 deletions CMakeLists.txt
@@ -74,16 +74,25 @@ if ("${CMAKE_SYSTEM_NAME}" STREQUAL "Linux")
endif()


function(TRY_ADD_CXX_COMPILER_LINKER_FLAG FLAG TARGET)
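  # Temporarily add FLAG to CMAKE_REQUIRED_LIBRARIES so that the try-compile run by
  # check_cxx_compiler_flag also passes the flag at link time: sanitizer flags must be
  # accepted by both the compiler and the linker. The previous value is restored below.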

  set (_saved_CRL ${CMAKE_REQUIRED_LIBRARIES})
  set (CMAKE_REQUIRED_LIBRARIES ${FLAG})
  check_cxx_compiler_flag ("${FLAG}" ${TARGET})
  set (CMAKE_REQUIRED_LIBRARIES ${_saved_CRL})
  message(STATUS "checked ${FLAG}")
endfunction()

include(CheckCXXCompilerFlag)
include(CheckCCompilerFlag)
#set(SAFE_CMAKE_REQUIRED_LINK_OPTIONS "${CMAKE_REQUIRED_LINK_OPTIONS}")
#set(CMAKE_REQUIRED_LINK_OPTIONS "-fsanitize=address")
#CHECK_CXX_COMPILER_FLAG("-fsanitize=address" COMPILER_HAS_SANITIZER_ADDR)
#set(CMAKE_REQUIRED_LINK_OPTIONS "${SAFE_CMAKE_REQUIRED_LINK_OPTIONS}")
#unset(SAFE_CMAKE_REQUIRED_LINK_OPTIONS)
CHECK_CXX_COMPILER_FLAG(" -fsanitize=address " COMPILER_HAS_SANITIZER_ADDR)
CHECK_CXX_COMPILER_FLAG(" -fsanitize=leak " COMPILER_HAS_SANITIZER_LEAK)
CHECK_CXX_COMPILER_FLAG(" -fsanitize=undefined " COMPILER_HAS_SANITIZER_UNDEF)
TRY_ADD_CXX_COMPILER_LINKER_FLAG(" -fsanitize=address " COMPILER_HAS_SANITIZER_ADDR)
TRY_ADD_CXX_COMPILER_LINKER_FLAG(" -fsanitize=leak " COMPILER_HAS_SANITIZER_LEAK)
TRY_ADD_CXX_COMPILER_LINKER_FLAG(" -fsanitize=undefined " COMPILER_HAS_SANITIZER_UNDEF)
#CHECK_CXX_COMPILER_FLAG(" -ffpe-trap=invalid,overflow " COMPILER_HAS_FTRAP)
if (COMPILER_HAS_SANITIZER_ADDR)
set(COMPILER_SANITIZE_FLAGS " ${COMPILER_SANITIZE_FLAGS} -fsanitize=address")
@@ -176,6 +185,7 @@ set (ANALISI_LIB
lib/src/calcoliblocchi.cpp
lib/src/convertibinario.cpp
lib/src/convolution.cpp
lib/src/correlatorespaziale.cpp
lib/src/cronometro.C
lib/src/doubleround.cpp
lib/src/gofrt.cpp
65 changes: 61 additions & 4 deletions analisi/main.cpp
@@ -50,6 +50,9 @@

#include "blockaverage.h"

#include <thread>
#include <chrono>

namespace std{

template<typename A, typename B>
@@ -168,6 +171,7 @@ int main(int argc, char ** argv)
std::vector<double> factors_input;
std::vector<std::string> headers,output_conversion_gro;
std::vector< std::pair <unsigned int,unsigned int > > cvar;
std::array< size_t, 3 > start_stop_skip;

options.add_options()
("input,i",boost::program_options::value<std::string>(&input)->default_value(""), "input file in binary LAMMPS format: id type xu yu zu vx vy vz")
@@ -205,7 +209,7 @@ int main(int argc, char ** argv)
("subBlock,k",boost::program_options::value<unsigned int>(&n_seg)->default_value(1),"optimization option for the green-kubo calculation. It is the number of sub-blocks when calculating averages inside the big block used to calculate the variance. The performance depends on the particular system used.")
("kk",boost::program_options::bool_switch(&bench)->default_value(false),"small benchmark to find a good value of -k")
("kk-range",boost::program_options::value<std::vector<unsigned int > >(&kk_l)->multitoken(),"range where the -k benchmark has to be performed")
("binary-convert",boost::program_options::value<std::string>(&output_conversion),"perform the conversion from txt to binary LAMMPS trajectory file. The name of the output binary file is specified here")
("binary-convert",boost::program_options::value<std::string>(&output_conversion),"The name of the output binary file is specified here. If given alone, perform the conversion from txt to binary LAMMPS trajectory file. If --cut is specified, the input is a different binary trajectory.")
#ifdef XDR_FILE
("binary-convert-gromacs",boost::program_options::value<std::vector<std::string>>(&output_conversion_gro)->multitoken(),"\
perform the conversion from gromacs format to the LAMMPS binary. Here you have to specify the output LAMMPS binary and the input type file with format:\n id0 type0\n ...\nidN typeN\nwith atomic types in the same order of the trr gromacs file. The trr gromacs file is specified with -i.")
@@ -215,6 +219,7 @@ int main(int argc, char ** argv)
("spherical-harmonics-correlation,Y",boost::program_options::value<unsigned int>(&sph)->default_value(0),"perform the calculation of the correlation function of the atomic density expanded in spherical harmonics. Note that this is a very heavy computation, first do a small test (for example with a very high -S or a low -s value). Here you have to specify the number of different bins of the considered radial distances, specified with -F. The code will calculate a correlation function for each bin.")
("buffer-size",boost::program_options::value<unsigned int>(&buffer_size)->default_value(30),"Buffer size for sh frame values. This is machine dependend and can heavily influence the performance of the code")
("lt",boost::program_options::value<unsigned int> (&read_lines_thread)->default_value(200),"parameter to read faster the time series column formatted txt file. It specifies the number of lines to read in one after the other for each thread")
("cut",boost::program_options::value(&start_stop_skip)->multitoken(),"Specify [start stop skip] as parameters. Write a new binary trajectory by taking all the timesteps between index start and stop that satisfy (idx-start)%skip==0. Index of the first timestep is 0.")
#ifdef EXPERIMENTAL
("spatial-correlator,A",boost::program_options::value(&nk)->default_value({0,0.0})->multitoken(),"Numero di punti della griglia ...")
("spatial-correlator-dir",boost::program_options::value(&kdir)->default_value({1.0,0.0,0.0})->multitoken(),"Direzione di k")
@@ -238,14 +243,26 @@ int main(int argc, char ** argv)


boost::program_options::notify(vm);

if (argc<=1 || ( (output_conversion!="" || output_conversion_gro.size()>0 ) && input=="") ||vm.count("help")|| (vm.count("loginput")==0 && ( debug2 || heat_coeff ) ) || skip<=0 || stop_acf<0 || final<0 || (!sub_mean && (sub_mean_start!=0) ) || sub_mean_start<0 || !(kk_l.size()==0 || kk_l.size()==2)){
std::cerr <<"cm.count " << vm.count("cut") <<std::endl;
if ( argc<=1
|| ( (output_conversion!="" || output_conversion_gro.size()>0 ) && input=="")
|| ( output_conversion=="" && vm.count("cut") > 0 )
|| vm.count("help")
|| (vm.count("loginput")==0 && ( debug2 || heat_coeff ) )
|| skip<=0
|| stop_acf<0
|| final<0
|| (!sub_mean && (sub_mean_start!=0) )
|| sub_mean_start<0
|| !(kk_l.size()==0 || kk_l.size()==2)
){
std::cout << options << "\n";
if (vm.count("help"))
return 0;
return 1;
}


if (cvar_list.size()%2 != 0) {
std::cout << "Error: covariance indexes list must contain an even number of elements\n";
std::cout << options << "\n";
@@ -269,7 +286,7 @@ int main(int argc, char ** argv)

try {

if (output_conversion!="") {
if (output_conversion!="" && vm.count("cut") == 0 ) {

ConvertiBinario conv(input,output_conversion);
return 0;
@@ -285,6 +302,46 @@ int main(int argc, char ** argv)
}
}

if (vm.count("cut") > 0 ) {
if (start_stop_skip[2]==0) {
std::cerr << "Error: you must specify a positive skip"<<std::endl;
return 1;
}
Trajectory t(input);
if (start_stop_skip[1]==0) {
start_stop_skip[1]=t.get_ntimesteps();
}
std::cout << "dumping the trajectory from "<<start_stop_skip[0] << " to "<<
start_stop_skip[1] << " every " << start_stop_skip[2] << " steps..."<<std::endl;

    size_t written;
    auto thread_exception = std::exception_ptr();
    bool finished=false;
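    // run the dump in a worker thread so that the main thread can periodically report
    // progress through the `written` counter, which dump_every keeps up to date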
    auto dumper_thread = std::thread([&](){
        try {
            t.dump_every(0,start_stop_skip[0],start_stop_skip[1],start_stop_skip[2],output_conversion.c_str(),written);
            finished=true;
        } catch (...) {
            finished=true;
            thread_exception = std::current_exception();
        }
    });
    while(! finished) {
        for (int i=0;i<100;++i) {
            if (finished) break;
            std::this_thread::sleep_for(std::chrono::milliseconds(50));
        }
        std::cout << "Written "<<written <<" steps"<<std::endl;
    }

    dumper_thread.join();
    if (thread_exception) {
        std::rethrow_exception(thread_exception);
    }
    return 0;
}



#ifdef FFTW3_THREADS
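Taken together, --binary-convert and the new --cut option allow a reduced copy of a binary trajectory to be written from the command line: assuming the executable is named analisi and using illustrative file names and indices, a run such as analisi -i full.bin --binary-convert cut.bin --cut 1000 5000 10 copies one timestep every 10, with index between 1000 and 5000 (inclusive), from full.bin into cut.bin.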
2 changes: 1 addition & 1 deletion config.h.in
@@ -60,7 +60,7 @@ const static char * _info_msg=
#ifdef XDR_FILE
"\nWith gromacs XDR file conversion support"
#endif
"\nv0.5.1"
"\nv0.5.2"
;


2 changes: 1 addition & 1 deletion install/install_python.sh
@@ -28,7 +28,7 @@ fi

mkdir -p "$SP_DIR/pyanalisi"
$CP "$BUILD_DIR"/pyanalisi*.so "$SP_DIR/pyanalisi/"
for f in common.py __init__.py
for f in common.py trajectory.py analysis.py plotters.py __init__.py
do
$CP "$SOURCE_DIR/pyanalisi/$f" "$SP_DIR/pyanalisi/"
done
2 changes: 1 addition & 1 deletion install/install_test.sh
@@ -1,3 +1,3 @@
#!/bin/bash
set -e
pip install pytest pytest-regressions pandas matplotlib numpy scipy testbook k3d
pip install pytest pytest-regressions pandas matplotlib numpy scipy testbook k3d ipykernel
7 changes: 7 additions & 0 deletions lib/include/trajectory.h
@@ -111,6 +111,13 @@ class Trajectory : public BaseTrajectory<Trajectory>
void index_all();
int * get_lammps_id();
int *get_lammps_type();
/**
 * Write a new binary file by copying the timesteps from index start to index stop
 * (the closed interval [start, stop]), taking one every skip steps.
 * Starts reading at offset and returns the offset of the next timestep in the file;
 * written is constantly updated with the number of timesteps written to the new file.
 */
size_t dump_every(size_t offset, const size_t start, const size_t stop, const size_t skip, const char * fname, size_t & written);
private:
std::map<int,unsigned int>id_map;
size_t * timesteps; // pointers (offsets from the beginning of the file) to the start of each timestep
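A minimal sketch of how dump_every could be driven directly from the library, mirroring what main.cpp now does for --cut (the file names and the bare construction of a Trajectory from a path are illustrative assumptions, not part of this commit):

    #include "trajectory.h"
    #include <cstddef>
    #include <iostream>

    int main() {
        Trajectory t("full.bin");   // binary LAMMPS trajectory (illustrative name)
        size_t written = 0;
        // copy timesteps 1000..5000 (closed interval), keeping one every 10, into cut.bin;
        // `written` is kept up to date while the copy is running
        size_t next_offset = t.dump_every(0, 1000, 5000, 10, "cut.bin", written);
        std::cout << "wrote " << written << " timesteps, next offset " << next_offset << "\n";
        return 0;
    }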
19 changes: 18 additions & 1 deletion lib/src/trajectory.cpp
@@ -37,7 +37,7 @@
#include <stdexcept>
#include <sstream>
#include "macros.h"

#include <fstream>



@@ -316,6 +316,23 @@ size_t Trajectory::leggi_pezzo(const size_t &partenza /// offset to start from

}

size_t Trajectory::dump_every(size_t offset, const size_t start, const size_t stop, const size_t skip, const char* fname, size_t & written) {
    std::ofstream outfile(fname,std::ofstream::binary);
    size_t timestep_size = 0;
    written=0;
    for (size_t i = 0; i<=stop && i<n_timesteps; ++i){
        TimestepManager header;
        Chunk * ch;
        timestep_size = leggi_pezzo<false>(offset, header,ch);
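        // leggi_pezzo has parsed the timestep starting at `offset` and returned its size in
        // bytes; selected timesteps are copied verbatim (raw bytes of the input buffer), so
        // the output file keeps the original LAMMPS binary layout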
        if (i>=start && i<=stop && (i-start)%skip==0 ) {
            outfile.write(file+offset,timestep_size);
            written+=1;
        }
        offset+=timestep_size;
    }
    return offset;
}

/**
 * Sets up the pointers to the timestep data and
 * returns the size in bytes of the piece that was read, including the chunks with the actual data, which however are not returned
13 changes: 10 additions & 3 deletions notebooks/calc_inspector.ipynb
@@ -6,7 +6,7 @@
"metadata": {},
"outputs": [],
"source": [
"import pyanalisi as pa\n",
"import pyanalisi as pa\n",
"import numpy as np"
]
},
@@ -95,7 +95,7 @@
},
"outputs": [],
"source": [
"density=pa.atomic_density(traj)\n",
"density=pa.atomic_density(pa.trajectory.Trajectory(traj))\n",
"pa.density_field(*density)"
]
},
@@ -127,6 +127,13 @@
"source": [
"shp=pa.plot_sh(0.7,1.4,times,sh,1,0,0,log=False,pre_fit=0.4)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": []
}
],
"metadata": {
@@ -145,7 +152,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.9.12"
"version": "3.11.0"
}
},
"nbformat": 4,
24 changes: 16 additions & 8 deletions notebooks/example2.ipynb
@@ -26,7 +26,7 @@
"metadata": {},
"outputs": [],
"source": [
"phase_IV=read_lammps_bin('./dump2.8fs.bin',wrap=False)"
"phase_IV=read_lammps_bin('../tests/data/lammps.bin',wrap=False,dt=1.0)"
]
},
{
@@ -44,7 +44,7 @@
"metadata": {},
"outputs": [],
"source": [
"plt.plot(phase_IV.get_array('positions')[:,phase_IV.t.get_lammps_type()==1,0][:,:10])"
"plt.plot(phase_IV.get_array('positions')[:,phase_IV.get_analisi_traj().get_lammps_type()==1,0][:,:10])"
]
},
{
@@ -53,7 +53,7 @@
"metadata": {},
"outputs": [],
"source": [
"pl=show_traj(phase_IV,wrap=False)"
"pl=phase_IV.show_traj()"
]
},
{
@@ -66,7 +66,7 @@
"source": [
"res=multiinspect([phase_IV], plot=True, prefix='phase_IV_nose_',\n",
" inspect_kw={\n",
" 'nthreads':12,\n",
" 'nthreads':4,\n",
" 'do_sh':False,\n",
" 'plot_st_kw': {\n",
" 'transpose':True,\n",
@@ -99,21 +99,29 @@
},
"outputs": [],
"source": [
"steinhardt_movie(phase_IV,skip=20,n_segments=150,\n",
"html,_=steinhardt_movie(phase_IV,skip=20,n_segments=15,\n",
" plt_steinhardt_kw={\n",
" 'transpose':True,\n",
" 'xmax':.40,\n",
" 'ymax':.70,\n",
" 'single':(1,1)},\n",
" compute_steinhardt_kw={'nthreads':12},\n",
" neigh=[(55,3.5**2,0.0),(65,3.5**2,0.0)]\n",
" )"
" )\n",
"html"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": []
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3",
"display_name": "Python 3 (ipykernel)",
"language": "python",
"name": "python3"
},
@@ -127,7 +135,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.7.3"
"version": "3.11.0"
}
},
"nbformat": 4,
4 changes: 2 additions & 2 deletions pyanalisi/__init__.py
@@ -1,2 +1,2 @@
from pyanalisi.pyanalisi import *
from pyanalisi.common import *
from .pyanalisi import *
from .common import *
(diffs for the remaining changed files not shown)