diff --git a/.gitignore b/.gitignore index 3f3494c..20c6226 100644 --- a/.gitignore +++ b/.gitignore @@ -1,24 +1,5 @@ -*.lo -*.la +.vscode/ +build/ *.o *.so -.deps -.libs -aclocal.m4 -autom4te.cache -configure -Makefile -Makefile.in -INSTALL -m4 -py-compile -config.* -src/psftest -test/test_psfdataset -missing -ltmain.sh -libtool -libpsf.pc -libpsf-uninstalled.pc -depcomp *~ diff --git a/CMakeLists.txt b/CMakeLists.txt new file mode 100644 index 0000000..69aba94 --- /dev/null +++ b/CMakeLists.txt @@ -0,0 +1,25 @@ +# https://stackoverflow.com/questions/17511496/how-to-create-a-shared-library-with-cmake +# https://github.com/giuliopaci/cmake-tutorial/blob/master/CMakeLists.txt + +cmake_minimum_required(VERSION 3.12) + +project(libpsf VERSION 0.3 + DESCRIPTION "Load Cadence Spectre PSF simulation data" + LANGUAGES CXX) + +option(WITH_PYTHON "Build python bindings" ON) + +set(CMAKE_CXX_STANDARD 11) +set(CMAKE_CXX_STANDARD_REQUIRED True) +set(CMAKE_EXPORT_COMPILE_COMMANDS ON) + +if(CMAKE_PROJECT_NAME STREQUAL PROJECT_NAME) + include(CTest) +endif() + +add_subdirectory(src) +add_subdirectory(test) + +if(WITH_PYTHON) + add_subdirectory(bindings) +endif() \ No newline at end of file diff --git a/ChangeLog b/ChangeLog deleted file mode 100644 index e69de29..0000000 diff --git a/Makefile.am b/Makefile.am deleted file mode 100644 index 6791395..0000000 --- a/Makefile.am +++ /dev/null @@ -1,14 +0,0 @@ -if ENABLE_TESTS - MAYBE_TEST = test -endif - -SUBDIRS = \ - src \ - include \ - bindings \ - $(MAYBE_TEST) - -pkgconfigdir=$(libdir)/pkgconfig -pkgconfig_DATA=libpsf.pc - -README: README.rst diff --git a/NEWS b/NEWS deleted file mode 100644 index e69de29..0000000 diff --git a/README.rst b/README.rst index 4a1d419..38a27a5 100644 --- a/README.rst +++ b/README.rst @@ -3,34 +3,44 @@ libpsf is a c++ library that reads Cadence PSF waveform files Install ======= -Install prerequisits +Install prerequisites -------------------- +If building without python binding, only cmake and boost 
are required -On a debian based system you can run the following to install the +- On a debian based system you can run the following to install the packages needed to build libpsf: -sudo apt-get install autoconf automake libtool libboost-all-dev python-numpy-dev + $ sudo apt-get install cmake libboost-all-dev python-numpy-dev cython cppunit + +- Otherwise conda can be used to install the following packages: + + $ conda install python numpy cython cmake + + Then install boost libraries and set + + $ export BOOST_LOC= Build and install ----------------- -To build and install the library:: +- From root directory, create build directory - ./autogen.sh - make - sudo make install + $ mkdir build && cd build +- Run cmake configuration -To build the python extension:: + $ cmake .. -DCMAKE_INSTALL_PREFIX=$CONDA_PREFIX -DCMAKE_BUILD_TYPE=RELEASE -DWITH_PYTHON=ON - ./autogen.sh --with-python - make - sudo make install + `CONDA_PREFIX` is the destination where you want libpsf to be installed + To build without the python binding, just set `-DWITH_PYTHON=OFF` +- Build + $ make +- To run tests, [cppunit](https://www.freedesktop.org/wiki/Software/cppunit) is required. + + $ ctest -Running the tests ------------------ -Install cppunit, then compile and run the tests in the test dir:: + `ctest --verbose` to see individual test result outputs + +- Install - sudo apt-get install libcppunit-dev - cd test - make - ./test_psfdataset + $ make install + diff --git a/autogen.sh b/autogen.sh deleted file mode 100755 index 6dec92d..0000000 --- a/autogen.sh +++ /dev/null @@ -1,6 +0,0 @@ -#!/bin/sh -e -test -n "$srcdir" || srcdir=`dirname "$0"` -test -n "$srcdir" || srcdir=. 
- -autoreconf --force --install --verbose "$srcdir" -Im4 -test -n "$NOCONFIGURE" || "$srcdir/configure" "$@" diff --git a/bindings/CMakeLists.txt b/bindings/CMakeLists.txt new file mode 100644 index 0000000..8e5f91a --- /dev/null +++ b/bindings/CMakeLists.txt @@ -0,0 +1 @@ +add_subdirectory(python) diff --git a/bindings/Makefile.am b/bindings/Makefile.am deleted file mode 100644 index 99ec3a9..0000000 --- a/bindings/Makefile.am +++ /dev/null @@ -1,6 +0,0 @@ -if WITH_PYTHONBINDINGS -MAYBE_PYTHONBINDINGS = python -endif - -SUBDIRS = $(MAYBE_PYTHONBINDINGS) - diff --git a/bindings/python/.gitignore b/bindings/python/.gitignore new file mode 100644 index 0000000..5fa7484 --- /dev/null +++ b/bindings/python/.gitignore @@ -0,0 +1,3 @@ +libpsf.cpp +libpsf.h +*.pyc diff --git a/bindings/python/CMakeLists.txt b/bindings/python/CMakeLists.txt new file mode 100644 index 0000000..77b0973 --- /dev/null +++ b/bindings/python/CMakeLists.txt @@ -0,0 +1,46 @@ +find_package(Python COMPONENTS Interpreter Development) +find_program(CYTHON_EXECUTABLE NAMES cython cython3 + HINTS ${_python_path}) +# find_program(Python REQUIRED COMPONENTS Interpreter Development NumPy) +# https://bloerg.net/posts/cmake-and-distutils/ +if( Python_EXECUTABLE AND CYTHON_EXECUTABLE) + MESSAGE( STATUS "numpy headers found at: ${Python_NumPy_INCLUDE_DIRS}") + MESSAGE( STATUS "cython found at: ${CYTHON_EXECUTABLE}") + + # set variables for setup.py.in + get_target_property(LIBPSF_BUILD_DIR psf BINARY_DIR) + get_target_property(LIBPSF_INCLUDE psf INCLUDE_DIRECTORIES) + + set(SETUP_PY_IN "${CMAKE_CURRENT_SOURCE_DIR}/setup.py.in") + set(SETUP_PY "${CMAKE_CURRENT_BINARY_DIR}/setup.py") + set(DEPS setup.py.in psfpython.h psfpython.cc libpsf.pyx cpp_defs.pxd) + set(OUTPUT "${CMAKE_CURRENT_BINARY_DIR}/build/timestamp") + configure_file(${SETUP_PY_IN} ${SETUP_PY}) + # build commands + add_custom_command(OUTPUT ${OUTPUT} + COMMAND ${Python_EXECUTABLE} ${SETUP_PY} build_ext --inplace + COMMAND ${Python_EXECUTABLE} 
${SETUP_PY} bdist_wheel + COMMAND ${CMAKE_COMMAND} -E touch ${OUTPUT} + DEPENDS ${DEPS}) + + add_custom_target(python_binding ALL DEPENDS ${OUTPUT}) + add_dependencies(python_binding psf) + # install binding + install(CODE "execute_process(COMMAND ${Python_EXECUTABLE} ${SETUP_PY} install)") + + + install(DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}/dist + DESTINATION ${CMAKE_CURRENT_SOURCE_DIR}) + + # tests + if(CMAKE_PROJECT_NAME STREQUAL PROJECT_NAME AND BUILD_TESTING) + add_test(NAME python_test + COMMAND ${Python_EXECUTABLE} -m unittest test_psfdataset.py -v + WORKING_DIRECTORY "${CMAKE_CURRENT_SOURCE_DIR}/tests") + set_tests_properties(python_test PROPERTIES + ENVIRONMENT PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}:$ENV{PYTHONPATH}) + endif() + +else() + MESSAGE( WARNING "python/numpy/cython include not found, skipping python bindings") +endif() diff --git a/bindings/python/Makefile.am b/bindings/python/Makefile.am deleted file mode 100644 index 6da057b..0000000 --- a/bindings/python/Makefile.am +++ /dev/null @@ -1,17 +0,0 @@ -EXTRA_DIST = setup.py - -if WITH_PYTHONBINDINGS - -all-local: - $(PYTHON) setup.py build_ext --include-dirs=../../include:@BOOST_ROOT_PATH@/include --library-dirs=../../src/.libs:@BOOST_ROOT_PATH@/lib - -install-exec-local: - $(PYTHON) setup.py install --prefix=$(prefix) --root=$(DESTDIR) - -uninstall-local: - rm -rf $(DESTDIR)$(libdir)/python*/*-packages/*libpsf* - -clean-local: - $(PYTHON) setup.py clean --all - -endif \ No newline at end of file diff --git a/bindings/python/__init__.py b/bindings/python/__init__.py deleted file mode 100644 index d39e8e5..0000000 --- a/bindings/python/__init__.py +++ /dev/null @@ -1 +0,0 @@ -from _psf import * diff --git a/bindings/python/cpp_defs.pxd b/bindings/python/cpp_defs.pxd new file mode 100644 index 0000000..029e474 --- /dev/null +++ b/bindings/python/cpp_defs.pxd @@ -0,0 +1,33 @@ +# https://cython.readthedocs.io/en/latest/src/userguide/wrapping_CPlusPlus.html +# https://stackoverflow.com/a/10121232 +# 
https://stackoverflow.com/a/29002414 +from cpython.ref cimport PyObject + +from libcpp cimport bool +from libcpp.string cimport string +from libcpp.vector cimport vector + +cdef extern from "psf.h": + cdef cppclass PSFVector + cdef cppclass PSFBase + +cdef extern from "psfpython.h": + cdef void raise_py_error() + cdef cppclass PropertyMap + +cdef extern from "psf.h": + cdef cppclass PSFDataSet: + PSFDataSet(string) except +raise_py_error + int get_nsweeps() except +raise_py_error + int get_sweep_npoints() except +raise_py_error + const vector[string] get_signal_names() except +raise_py_error + const vector[string] get_sweep_param_names() except +raise_py_error + PSFVector* get_sweep_values() except +raise_py_error + PSFBase *get_signal(string name) except +raise_py_error + PropertyMap& get_signal_properties(string name) except +raise_py_error + PropertyMap& get_header_properties() except +raise_py_error + bool is_swept() except +raise_py_error + void close() except +raise_py_error + void open() except +raise_py_error + void set_invertstruct(bool value) except +raise_py_error + bool get_invertstruct() except +raise_py_error diff --git a/bindings/python/dist/libpsf-0.3-cp37-cp37m-linux_x86_64.whl b/bindings/python/dist/libpsf-0.3-cp37-cp37m-linux_x86_64.whl new file mode 100644 index 0000000..63812be Binary files /dev/null and b/bindings/python/dist/libpsf-0.3-cp37-cp37m-linux_x86_64.whl differ diff --git a/bindings/python/libpsf.pyx b/bindings/python/libpsf.pyx new file mode 100644 index 0000000..6cff6f0 --- /dev/null +++ b/bindings/python/libpsf.pyx @@ -0,0 +1,110 @@ +# distutils: language = c++ +# cython: language_level=2 + +from libcpp cimport bool + +from cpython.ref cimport PyObject +from cpp_defs cimport PSFVector, PSFBase, PropertyMap +from cpp_defs cimport PSFDataSet as C_PSFDataSet + +cimport numpy as np +np.import_array() + +cdef extern from "psfpython.h": + cdef np.ndarray psfvector_to_numpy_array(PSFVector *vec, bool copy) + cdef object 
psfbase_to_numpy_array(PSFBase *d) + cdef object propertymap_to_python(PropertyMap& propmap) + +class FileOpenError(RuntimeError): + pass + +class NotFound(RuntimeError): + pass + +class DataSetNotOpen(RuntimeError): + pass + +class UnknownType(RuntimeError): + pass + +class IncorrectChunk(RuntimeError): + pass + +cdef public PyObject* fileOpenError = FileOpenError +cdef public PyObject* notFoundError = NotFound +cdef public PyObject* notOpenError = DataSetNotOpen +cdef public PyObject* unknownTypeError = UnknownType +cdef public PyObject* incorrectChunkError = IncorrectChunk + + +cdef to_unicode(str_): + return str_.encode('UTF-8') + +cdef bytes_to_string(str_list): + return map(lambda x: x.decode('UTF-8'), str_list) + + +cdef class PSFDataSet: + cdef C_PSFDataSet *obj + + def __cinit__(self, filename): + self.obj = new C_PSFDataSet(to_unicode(filename)) + + def __dealloc__(self): + del self.obj + + def get_nsweeps(self): + """Return the number of sweeps""" + return self.obj.get_nsweeps() + + def get_sweep_npoints(self): + """Return the number of points in the sweep""" + return self.obj.get_sweep_npoints() + + def get_signal_names(self): + """Return a list of signal names""" + return bytes_to_string(self.obj.get_signal_names()) + + def get_sweep_param_names(self): + """Parameters that have been swept""" + return bytes_to_string(self.obj.get_sweep_param_names()) + + def get_sweep_values(self): + """Numpy array of swept values""" + return psfvector_to_numpy_array(self.obj.get_sweep_values(), False) + + def get_signal(self, signal): + """Numpy array of signal values""" + return psfbase_to_numpy_array(self.obj.get_signal(to_unicode(signal))) + + def get_signal_properties(self, signal): + """Properties of a non swept signal + Throws NotFound exception for non_swept datasets so check self.is_swept + first + """ + return propertymap_to_python(self.obj.get_signal_properties(to_unicode(signal))) + + def get_header_properties(self): + """Dict of header [rp[ertoes and 
values""" + return propertymap_to_python(self.obj.get_header_properties()) + + def is_swept(self): + """Is the data swept""" + return self.obj.is_swept() + + def close(self): + """Close PSF Data Set""" + self.obj.close() + + def open(self): + """Open PSF Data Set""" + self.obj.open() + + def set_invertstruct(self, bool value): + """Set invert struct""" + self.obj.set_invertstruct(value) + + def get_invertstruct(self): + """Get invert struct""" + return self.obj.get_invertstruct() + diff --git a/bindings/python/psfpython.cc b/bindings/python/psfpython.cc index 4f0a0cd..19a3c8e 100644 --- a/bindings/python/psfpython.cc +++ b/bindings/python/psfpython.cc @@ -1,260 +1,177 @@ -#include "psf.h" -#include "psfdata.h" - -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include +// patches from https://gitlab.com/bjmuld/libpsf-python -#include +#include +#include -#include +#include "Python.h" +#define NPY_NO_DEPRECATED_API NPY_1_7_API_VERSION #include +#include "psf.h" +#include "psfdata.h" +#include "psfpython.h" + +#ifndef PY_MAJOR_VERSION + #error could not find python version from python headers +#else + #if PY_MAJOR_VERSION >= 3 + #warning "detected python 3.x"; + #define STRING_INPUT_FN PyUnicode_FromString + #define LONG_INPUT_FN PyLong_FromLong + #else + #warning "detected python 2.x"; + #define STRING_INPUT_FN PyString_FromString + #define LONG_INPUT_FN PyInt_FromLong + #endif +#endif + +// init_numpy hack https://stackoverflow.com/questions/47026900/pyarray-check-gives-segmentation-fault-with-cython-c +int init_numpy(){ + if(_import_array() < 0){ + PyErr_Print(); + PyErr_SetString(PyExc_ImportError, "numpy.core.multiarray failed to import"); + return false; + } + return true; +} -using namespace boost::python; -namespace py = boost::python; - -template -struct VecToList -{ - static PyObject* convert(const std::vector& vec) - { - boost::python::list* l = new boost::python::list(); - for(size_t i = 0; i < vec.size(); i++) - 
(*l).append(vec[i]); - - return l->ptr(); - } -}; - -struct Struct_to_python { - static PyObject *convert(const Struct& s); -}; +const static int numpy_initialized = init_numpy(); + +extern PyObject *fileOpenError; +extern PyObject *notFoundError; +extern PyObject *notOpenError; +extern PyObject *unknownTypeError; +extern PyObject *incorrectChunkError; + +void raise_py_error(){ + try{ + throw; + }catch (FileOpenError& e){ + std::string msg = "File Open Error"; + PyErr_SetString(fileOpenError, msg.c_str()); + }catch (NotFound& e){ + std::string msg = "Signal not found"; + PyErr_SetString(notFoundError, msg.c_str()); + }catch(DataSetNotOpen& e){ + std::string msg = "Dataset not open"; + PyErr_SetString(notOpenError, msg.c_str()); + }catch(UnknownType& e){ + std::stringstream msg; msg << "Unknown type " << e.type_id; + PyErr_SetString(unknownTypeError, msg.str().c_str()); + }catch (IncorrectChunk& e){ + std::stringstream msg; msg << "Incorrect chunk " << e.chunktype; + PyErr_SetString(incorrectChunkError, msg.str().c_str()); + }catch (const std::exception & e){ + PyErr_SetString(PyExc_RuntimeError, "general exception"); + } +} PyObject *psfscalar_to_python(const PSFScalar *scalar) { - PyObject *result = NULL; - if (const PSFDoubleScalar *p = dynamic_cast(scalar)) - result = PyFloat_FromDouble(p->value); - else if (const PSFInt32Scalar *p = dynamic_cast(scalar)) - result = PyInt_FromLong((int)*p); - else if (const PSFStringScalar *p = dynamic_cast(scalar)) - result = PyString_FromString(p->tostring().c_str()); - else if(const StructScalar *p = dynamic_cast(scalar)) - result = Struct_to_python::convert(p->value); - else - throw NotImplemented(); - - return result; -} + PyObject *result = NULL; + if (const PSFDoubleScalar *p = dynamic_cast(scalar)) + result = PyFloat_FromDouble(p->value); + else if (const PSFInt32Scalar *p = dynamic_cast(scalar)) + result = LONG_INPUT_FN((int)*p); + else if (const PSFStringScalar *p = dynamic_cast(scalar)) + result = 
STRING_INPUT_FN(p->tostring().c_str()); + else if(const StructScalar *p = dynamic_cast(scalar)) + result = struct_to_python(p->value); + else + throw NotImplemented(); -struct PSFScalar_to_python { - static PyObject *convert(const PSFScalar *scalar) { - PyObject *result = psfscalar_to_python(scalar); - delete scalar; return result; - } -}; +} -struct PropertyMap_to_python { - static PyObject *convert(const PropertyMap& propmap) { +PyObject* struct_to_python(const Struct& s){ PyObject *dict = PyDict_New(); - for(PropertyMap::const_iterator i = propmap.begin(); i != propmap.end(); i++) - PyDict_SetItem(dict, PyString_FromString(i->first.c_str()), - psfscalar_to_python(i->second)); + for(Struct::const_iterator i = s.begin(); i != s.end(); i++) { + PyDict_SetItem(dict, STRING_INPUT_FN(i->first.c_str()), + psfscalar_to_python(i->second)); + } + return dict; - } -}; -PyObject *Struct_to_python::convert(const Struct& s) { - PyObject *dict = PyDict_New(); - - for(Struct::const_iterator i = s.begin(); i != s.end(); i++) { - PyDict_SetItem(dict, PyString_FromString(i->first.c_str()), - psfscalar_to_python(i->second)); - } - - return dict; } -PyObject *create_numpy_vector(int n, int type, void *data, bool copy) { - npy_intp dims[1] = { n }; +PyObject* vector_struct_to_python(VectorStruct *vs) { + // Create dictionary of numpy arrays + PyObject *dict = PyDict_New(); - if(copy) { - PyObject *result = PyArray_SimpleNew(1, dims, type); - void *arr_data = PyArray_DATA((PyArrayObject*)result); - memcpy(arr_data, data, PyArray_ITEMSIZE((PyArrayObject*) result) * n); - return result; - } else - return PyArray_SimpleNewFromData(1, dims, type, data); + for(VectorStruct::const_iterator i = vs->begin(); i != vs->end(); i++) + PyDict_SetItem(dict, STRING_INPUT_FN(i->first.c_str()), + (PyObject *) psfvector_to_numpy_array(i->second, true)); + return dict; } -PyObject *psfvector_to_numpyarray(PSFVector *vec, bool copy=false) { - PyObject *result = NULL; +PyObject* create_numpy_vector(int n, 
int type, void *data, bool copy) { + npy_intp dims[1] = { n }; - if (PSFDoubleVector *f64v = dynamic_cast(vec)) { - // Create numpy array - result = create_numpy_vector(f64v->size(), PyArray_DOUBLE, &f64v->at(0), copy); - } else if (PSFComplexDoubleVector *cf64v = - dynamic_cast(vec)) { - // Create numpy array - result = create_numpy_vector(cf64v->size(), PyArray_CDOUBLE, &cf64v->at(0), copy); - } else if (StructVector *sv = dynamic_cast(vec)) { - // Create numpy array - npy_intp dims[1] = { sv->size() }; - result = PyArray_SimpleNew(1, dims, PyArray_OBJECT); - - PyObject **ptr = (PyObject **) PyArray_DATA(result); - for(unsigned int i=0; i < sv->size(); i++) - ptr[i] = Struct_to_python::convert(sv->at(i)); - - // Make source vector is deleted - copy = true; - } else if (vec == NULL) { - result = Py_None; - } - - if(copy && (vec != NULL)) - delete vec; - - return result; + if(copy) { + PyObject *result = PyArray_SimpleNew(1, dims, type); + void *arr_data = PyArray_DATA((PyArrayObject*)result); + memcpy(arr_data, data, PyArray_ITEMSIZE((PyArrayObject*) result) * n); + return result; + } else + return PyArray_SimpleNewFromData(1, dims, type, data); } -PyObject *vectorstruct_to_python(VectorStruct *vs) { - // Create dictionary of numpy arrays - PyObject *dict = PyDict_New(); - - for(VectorStruct::const_iterator i = vs->begin(); i != vs->end(); i++) - PyDict_SetItem(dict, PyString_FromString(i->first.c_str()), - psfvector_to_numpyarray(i->second, true)); - return dict; -} +PyObject* psfvector_to_numpy_array(PSFVector *vec, bool copy) { + PyObject *result = NULL; + + if (PSFDoubleVector *f64v = dynamic_cast(vec)) { + // Create numpy array + result = create_numpy_vector(f64v->size(), NPY_DOUBLE, &f64v->at(0), copy); + } else if (PSFComplexDoubleVector *cf64v = + dynamic_cast(vec)) { + // Create numpy array + result = create_numpy_vector(cf64v->size(), NPY_CDOUBLE, &cf64v->at(0), copy); + } else if (StructVector *sv = dynamic_cast(vec)) { + // Create numpy array + 
npy_intp dims[1] = { (int) sv->size() }; + result = PyArray_SimpleNew(1, dims, NPY_OBJECT); + + PyObject **ptr = (PyObject **) PyArray_DATA((PyArrayObject*) result); + for(unsigned int i=0; i < sv->size(); i++) + ptr[i] = struct_to_python(sv->at(i)); + + // Make source vector is deleted + copy = true; + } else if (vec == NULL) { + result = Py_None; + } -struct PSFVector_to_numpyarray { - static PyObject *convert(PSFVector *vec) { - return psfvector_to_numpyarray(vec); - } -}; + if(copy && (vec != NULL)) + delete vec; -struct PSFBase_to_numpyarray { - static PyObject *convert(PSFBase *d) { + return result; +} + +PyObject* psfbase_to_numpy_array(PSFBase * d){ const PSFScalar *scalar = dynamic_cast(d); if (scalar != NULL) - return psfscalar_to_python(scalar); + return psfscalar_to_python(scalar); else { - PSFVector *vector = dynamic_cast(d); - if (vector != NULL) - return psfvector_to_numpyarray(vector, true); - else { - VectorStruct *vs = dynamic_cast(d); - - if(vs != NULL) - return vectorstruct_to_python(vs); - else - return Py_None; - } + PSFVector *vector = dynamic_cast(d); + if (vector != NULL) + return psfvector_to_numpy_array(vector, true); + else { + VectorStruct *vs = dynamic_cast(d); + + if(vs != NULL) + return vector_struct_to_python(vs); + else + return Py_None; + } } - } -}; - -// Exception translators -void translate_exception(IncorrectChunk const& e) { - std::stringstream msg; msg << "Incorrect chunk " << e.chunktype; - PyErr_SetString(PyExc_RuntimeError, msg.str().c_str()); } -void translate_exception_unknown_type(UnknownType const& e) { - std::stringstream msg; msg << "Unknown type " << e.type_id; - PyErr_SetString(PyExc_RuntimeError, msg.str().c_str()); -} +PyObject* propertymap_to_python(PropertyMap& propmap){ + PyObject *dict = PyDict_New(); -void translate_exception_notfound(NotFound const& e) { - std::stringstream msg; msg << "Signal not found"; - PyErr_SetString(PyExc_RuntimeError, msg.str().c_str()); -} + for(PropertyMap::const_iterator i = 
propmap.begin(); i != propmap.end(); i++) + PyDict_SetItem(dict, STRING_INPUT_FN(i->first.c_str()), + psfscalar_to_python(i->second)); + return dict; -void translate_exception_fileopenerror(FileOpenError const& e) { - std::stringstream msg; msg << "File open error"; - PyErr_SetString(PyExc_IOError, msg.str().c_str()); } - -BOOST_PYTHON_MODULE(libpsf) -{ - import_array(); - to_python_converter(); - to_python_converter(); - to_python_converter(); - to_python_converter(); - to_python_converter(); - to_python_converter >, VecToList >(); - - bool show_user_defined = true; - bool show_cpp_signatures = false; - bool show_py_signatures = true; - docstring_options doc_options(show_user_defined, show_py_signatures, show_cpp_signatures); - - class_("PSFDataSet", "Open a psf results file.", - init((arg("self"), arg("filename")))) - .def("get_nsweeps", - &PSFDataSet::get_nsweeps, - (arg("self")), - "Return the number of sweeps") - .def("get_sweep_npoints", - &PSFDataSet::get_sweep_npoints, - (arg("self")), - "Return the number of points in the sweep") - .def("get_signal_names", - &PSFDataSet::get_signal_names, - (arg("self")), - "Return a list of signal names") - .def("get_sweep_param_names", - &PSFDataSet::get_sweep_param_names, - (arg("self")), - "Parameter that has been swept") - .def("get_sweep_values", - &PSFDataSet::get_sweep_values, - (arg("self")), - "numpy array of swept values", - return_value_policy()) - .def("get_signal", - &PSFDataSet::get_signal, - (arg("self"), arg("signal")), - "numpy array of signal values", - return_value_policy()) - .def("get_header_properties", - &PSFDataSet::get_header_properties, - (arg("self")), - "Dict of header properties and values", - return_value_policy()) - .def("get_signal_properties", - &PSFDataSet::get_signal_properties, - (arg("self"), arg("signal")), - "Properties of a non swept signal", - return_value_policy()) - .def("is_swept", - &PSFDataSet::is_swept, - (arg("self")), - "Is the data swept") - .def("close", - 
&PSFDataSet::close, - (arg("self")), - "Close PSF data set") - .add_property("invertstruct", - &PSFDataSet::get_invertstruct, - &PSFDataSet::set_invertstruct) - ; - - class_ incorrectChunkClass("IncorrectChunk", init()); - // class_ incorrectChunkClass("NotFound", init<>()); - boost::python::register_exception_translator(&translate_exception); - boost::python::register_exception_translator(&translate_exception_notfound); - boost::python::register_exception_translator(&translate_exception_fileopenerror); - boost::python::register_exception_translator(&translate_exception_unknown_type); -} diff --git a/bindings/python/psfpython.h b/bindings/python/psfpython.h new file mode 100644 index 0000000..bb10411 --- /dev/null +++ b/bindings/python/psfpython.h @@ -0,0 +1,13 @@ +#include "Python.h" +#define NPY_NO_DEPRECATED_API NPY_1_7_API_VERSION +#include +#include "psfdata.h" + +void raise_py_error(); +PyObject* psfscalar_to_python(const PSFScalar *scalar); +PyObject* struct_to_python(const Struct& s); +PyObject* create_numpy_vector(int n, int type, void *data, bool copy); +PyObject* psfvector_to_numpy_array(PSFVector *vec, bool copy=false); +PyObject* vector_struct_to_python(VectorStruct *vs); +PyObject* psfbase_to_numpy_array(PSFBase * d); +PyObject* propertymap_to_python(PropertyMap& propmap); diff --git a/bindings/python/setup.py b/bindings/python/setup.py deleted file mode 100644 index 9ec798a..0000000 --- a/bindings/python/setup.py +++ /dev/null @@ -1,24 +0,0 @@ -#!/usr/bin/env python - -from setuptools import setup -from setuptools.extension import Extension -import os, sys, platform -from distutils.command.build_ext import build_ext -from setuptools import Command - - -libpsf_ext = Extension(name = "libpsf", - sources = ["psfpython.cc"], - libraries = ["boost_python", "psf"]) - -import numpy -libpsf_ext.include_dirs.append(numpy.get_include()) - -setup( - name="libpsf", - ext_modules=[libpsf_ext], - package_dir = {"" : "."}, - packages=["tests"], - 
#tests_require=["mock"], - test_suite="tests", - ) diff --git a/bindings/python/setup.py.in b/bindings/python/setup.py.in new file mode 100644 index 0000000..041fb6e --- /dev/null +++ b/bindings/python/setup.py.in @@ -0,0 +1,59 @@ +#!/usr/bin/env python + +import os +import numpy + +# BEFORE importing setuptools, remove MANIFEST. Otherwise it may not be +# properly updated when the contents of directories change (true for distutils, +# not sure about setuptools). +if os.path.exists('MANIFEST'): + os.remove('MANIFEST') + +from setuptools import setup, Extension +from Cython.Build import cythonize + +try: + with open('README.md', "r") as fh: + long_description = fh.read() +except: + long_description = '' + +numpy_includes = [numpy.get_include()] +python_includes = "${Python_INCLUDE_DIRS}".split(":") +psf_includes = "${LIBPSF_INCLUDE}".split() +psf_lib_dir = "${LIBPSF_BUILD_DIR}" + +# https://stackoverflow.com/questions/4597228/how-to-statically-link-a-library-when-compiling-a-python-module-extension +static_libraries = ['psf'] +extra_objects = [ + '{}/lib{}_static.a'.format(psf_lib_dir, l) for l in static_libraries] +libraries = [] +extra_link_args=[] + + +libpsf_ext = Extension( + name="libpsf", + sources=["${CMAKE_CURRENT_SOURCE_DIR}/libpsf.pyx", + "${CMAKE_CURRENT_SOURCE_DIR}/psfpython.cc"], + extra_objects=extra_objects, + libraries=libraries, + include_dirs=psf_includes + numpy_includes + python_includes, + extra_link_args=extra_link_args, +) + +setup( + name="libpsf", + ext_modules=cythonize([libpsf_ext]), + version="${CMAKE_PROJECT_VERSION}", + description="library to read Cadence PSF output", + install_requires=['numpy>=1.10.0'], + test_suite="tests", + # metadata to display on PyPI + author="@lekez2005 originally Henrik Johansen", + long_description=long_description, + long_description_content_type="text/markdown", + license="GNU Lesser General Public License v3.0", + keywords=["cadence", "spectre", "virtuoso", "circtuit", "simulation", + "waveform", 
"circuit simulation"], + zip_safe=False +) diff --git a/bindings/python/tests/__init__.py b/bindings/python/tests/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/bindings/python/tests/test_psfdataset.py b/bindings/python/tests/test_psfdataset.py index 7ed2c2d..08e9b59 100644 --- a/bindings/python/tests/test_psfdataset.py +++ b/bindings/python/tests/test_psfdataset.py @@ -6,12 +6,13 @@ class test_tran(unittest.TestCase): def setUp(self): - self.psf = libpsf.PSFDataSet(os.path.dirname(__file__) + "/data/timeSweep") + dataset_file =os.path.join(os.path.dirname(__file__), "data/timeSweep") + self.psf = libpsf.PSFDataSet(dataset_file) def test_get_header_properties(self): props = self.psf.get_header_properties() - self.assertEquals(props, {'PSF style': 7, + self.assertEqual(props, {'PSF style': 7, 'PsfTrailerStart': 0, 'date': ' 5-Sep-2007 14:24:31', 'PSF buffer size': 593920, @@ -53,7 +54,7 @@ def test_is_swept(self): self.assertTrue(self.psf.is_swept()) - # FIXME This test segfaults - # def test_get_signal_properties(self): - # self.psf.get_signal_properties("PSUP") + def test_get_signal_properties(self): + with self.assertRaises(libpsf.NotFound): + self.psf.get_signal_properties("PSUP") diff --git a/configure.ac b/configure.ac deleted file mode 100644 index 3c4a092..0000000 --- a/configure.ac +++ /dev/null @@ -1,98 +0,0 @@ -# -*- Autoconf -*- -# Process this file with autoconf to produce a configure script. - -AC_PREREQ([2.59]) -AC_INIT([libpsf],[0.2],[henrik@johome.net]) -AC_CONFIG_MACRO_DIR([m4]) -AM_INIT_AUTOMAKE([1.9 -Wall no-define]) - -AC_LANG([C++]) - -dnl Checks for programs. -AC_PROG_CXX -AC_PROG_AWK -AC_PROG_CC -AC_PROG_CPP -AC_PROG_LIBTOOL -AC_PROG_INSTALL -AC_PROG_LN_S -AC_PROG_MAKE_SET -AC_PROG_RANLIB -AM_PROG_LIBTOOL - -dnl Checks for libraries. - -# Checks for header files. -AC_CHECK_HEADERS([arpa/inet.h fcntl.h stdint.h stdlib.h string.h unistd.h]) - -dnl Checks for typedefs, structures, and compiler characteristics. 
-AC_HEADER_STDBOOL -AC_TYPE_INT32_T -AC_TYPE_INT8_T -AC_TYPE_OFF_T -AC_TYPE_UINT32_T -AC_TYPE_UINT64_T - -dnl Check existence of tr1::unordered_map -AC_CHECK_HEADERS([tr1/unordered_map]) - -dnl Checks for boost -AX_BOOST_BASE([1.32.0]) -if test "$succeeded" != "yes" ; then - echo "Error: You need to install the boost library!" - exit -fi - -dnl check to build python bindings -AC_ARG_WITH(python, [AS_HELP_STRING([--with-python], [compile with Python bindings])], - with_python=$withval, with_python=no) - -if test $with_python = yes; then - AX_PYTHON_DEVEL - - AX_BOOST_PYTHON - - dnl Checks for boost python and numpy - AM_PATH_PYTHON(2.5, have_python=true, have_python=false) - - dnl Check for numpy - AC_MSG_CHECKING([for Numpy include directory]) - - CPPFLAGS_SAVE=$CPPFLAGS - CPPFLAGS=$PYTHON_CPPFLAGS $CPPFLAGS - NUMPY_INCLUDE_DIR=`echo "import numpy; print numpy.get_include()" | $PYTHON - 2>/dev/null` - AC_SUBST(NUMPY_INCLUDE_DIR) - AC_CHECK_HEADER([${NUMPY_INCLUDE_DIR}/numpy/arrayobject.h], - [NUMPY_HEADER=yes], - [AC_MSG_WARN([Numpy extension header not found])], - [#include "Python.h"]) - - AC_SUBST(NUMPY_HEADER) - CPPFLAGS_SAVE=$CPPFLAGS -fi - - -AM_CONDITIONAL(WITH_PYTHONBINDINGS, test ! -z "$BOOST_PYTHON_LIB" -a "x$NUMPY_HEADER" == "xyes" ) - -dnl Checks for library functions. 
-AC_FUNC_MMAP - -dnl Include libtool to build shared libraries -dnl LT_INIT - -dnl Check if we should build tests -AC_ARG_ENABLE([tests], AS_HELP_STRING([--enable-tests], [Enable tests])) - -AM_CONDITIONAL([ENABLE_TESTS], [test "$enable_tests" = yes]) - -AC_CONFIG_FILES([Makefile - include/Makefile - src/Makefile - bindings/Makefile - bindings/python/Makefile - test/Makefile - libpsf.pc - libpsf-uninstalled.pc - ]) -AC_OUTPUT - diff --git a/doc/format.txt b/doc/format.txt new file mode 100644 index 0000000..ddac0ba --- /dev/null +++ b/doc/format.txt @@ -0,0 +1,15 @@ +PSF + +Size: Last four bytes (Big-endian) + +nsections = (size - datasize - 12) / 8 + +const char *toc = buf + size - 12 - nsections*8; + +#define SECTION_HEADER 0 +#define SECTION_TYPE 1 +#define SECTION_SWEEP 2 +#define SECTION_TRACE 3 +#define SECTION_VALUE 4 + + diff --git a/examples/python/dcop.py b/examples/python/dcop.py index 28c1df1..270f9f0 100644 --- a/examples/python/dcop.py +++ b/examples/python/dcop.py @@ -5,5 +5,5 @@ #print list(d.get_signal_names()) -print d1.get_signal('XIRXRFMIXTRIM0.XM1PDAC6.XMN.MAIN') -print d2.get_signal('DACTOP_0.DIV2_1.MN27.mm4ynj') +print(d1.get_signal('XIRXRFMIXTRIM0.XM1PDAC6.XMN.MAIN')) +print(d2.get_signal('DACTOP_0.DIV2_1.MN27.mm4ynj')) diff --git a/examples/python/freqsweep.py b/examples/python/freqsweep.py index eb040da..4ee53e3 100644 --- a/examples/python/freqsweep.py +++ b/examples/python/freqsweep.py @@ -2,5 +2,5 @@ d = libpsf.PSFDataSet("../data/frequencySweep") -print d.get_sweep_values() -print d.get_signal('ANT_CM') +print(d.get_sweep_values()) +print(d.get_signal('ANT_CM')) diff --git a/examples/python/properties.py b/examples/python/properties.py index b259870..eb85bf8 100644 --- a/examples/python/properties.py +++ b/examples/python/properties.py @@ -4,6 +4,6 @@ d = libpsf.PSFDataSet("../data/opBegin") -print d.get_header_properties() +print(d.get_header_properties()) -print d.get_signal_properties('XIRXRFMIXTRIM0.XM1PDAC1.XMN.MAIN') 
+print(d.get_signal_properties('XIRXRFMIXTRIM0.XM1PDAC1.XMN.MAIN')) diff --git a/examples/python/pssfd.py b/examples/python/pssfd.py index 08a8057..d1b8aba 100644 --- a/examples/python/pssfd.py +++ b/examples/python/pssfd.py @@ -2,6 +2,6 @@ d = libpsf.PSFDataSet("../data/pss0.fd.pss") -print list(d.get_signal_names()) +print(list(d.get_signal_names())) -print d.get_sweep_values(), d.get_signal('1') +print(d.get_sweep_values(), d.get_signal('1')) diff --git a/examples/python/srcsweep.py b/examples/python/srcsweep.py index aa801b2..20300a8 100644 --- a/examples/python/srcsweep.py +++ b/examples/python/srcsweep.py @@ -2,6 +2,6 @@ d = libpsf.PSFDataSet("../data/srcSweep") -print list(d.get_signal_names()) +print(list(d.get_signal_names())) -print d.get_signal('VIN') +print(d.get_signal('VIN')) diff --git a/examples/python/tran.py b/examples/python/tran.py index 5a1d4e5..7637ef6 100644 --- a/examples/python/tran.py +++ b/examples/python/tran.py @@ -1,16 +1,16 @@ +import matplotlib.pyplot as plt import libpsf d = libpsf.PSFDataSet("../data/timeSweep") -print d.get_header_properties() +print(d.get_header_properties()) -print list(d.get_signal_names()) +print(list(d.get_signal_names())) t = d.get_sweep_values() v = d.get_signal('INP') -print len(v), len(t) +print (len(v), len(t)) -import pylab -pylab.plot(t, v) -pylab.show() +plt.plot(t, v) +plt.show() diff --git a/include/Makefile.am b/include/Makefile.am deleted file mode 100644 index d147cc9..0000000 --- a/include/Makefile.am +++ /dev/null @@ -1 +0,0 @@ -include_HEADERS = psfdata.h psf.h diff --git a/install-sh b/install-sh deleted file mode 100755 index 6781b98..0000000 --- a/install-sh +++ /dev/null @@ -1,520 +0,0 @@ -#!/bin/sh -# install - install a program, script, or datafile - -scriptversion=2009-04-28.21; # UTC - -# This originates from X11R5 (mit/util/scripts/install.sh), which was -# later released in X11R6 (xc/config/util/install.sh) with the -# following copyright and license. 
-# -# Copyright (C) 1994 X Consortium -# -# Permission is hereby granted, free of charge, to any person obtaining a copy -# of this software and associated documentation files (the "Software"), to -# deal in the Software without restriction, including without limitation the -# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or -# sell copies of the Software, and to permit persons to whom the Software is -# furnished to do so, subject to the following conditions: -# -# The above copyright notice and this permission notice shall be included in -# all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -# X CONSORTIUM BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN -# AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNEC- -# TION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# -# Except as contained in this notice, the name of the X Consortium shall not -# be used in advertising or otherwise to promote the sale, use or other deal- -# ings in this Software without prior written authorization from the X Consor- -# tium. -# -# -# FSF changes to this file are in the public domain. -# -# Calling this script install-sh is preferred over install.sh, to prevent -# `make' implicit rules from creating a file called install from it -# when there is no Makefile. -# -# This script is compatible with the BSD install script, but was written -# from scratch. - -nl=' -' -IFS=" "" $nl" - -# set DOITPROG to echo to test this script - -# Don't use :- since 4.3BSD and earlier shells don't like it. -doit=${DOITPROG-} -if test -z "$doit"; then - doit_exec=exec -else - doit_exec=$doit -fi - -# Put in absolute file names if you don't have them in your path; -# or use environment vars. 
- -chgrpprog=${CHGRPPROG-chgrp} -chmodprog=${CHMODPROG-chmod} -chownprog=${CHOWNPROG-chown} -cmpprog=${CMPPROG-cmp} -cpprog=${CPPROG-cp} -mkdirprog=${MKDIRPROG-mkdir} -mvprog=${MVPROG-mv} -rmprog=${RMPROG-rm} -stripprog=${STRIPPROG-strip} - -posix_glob='?' -initialize_posix_glob=' - test "$posix_glob" != "?" || { - if (set -f) 2>/dev/null; then - posix_glob= - else - posix_glob=: - fi - } -' - -posix_mkdir= - -# Desired mode of installed file. -mode=0755 - -chgrpcmd= -chmodcmd=$chmodprog -chowncmd= -mvcmd=$mvprog -rmcmd="$rmprog -f" -stripcmd= - -src= -dst= -dir_arg= -dst_arg= - -copy_on_change=false -no_target_directory= - -usage="\ -Usage: $0 [OPTION]... [-T] SRCFILE DSTFILE - or: $0 [OPTION]... SRCFILES... DIRECTORY - or: $0 [OPTION]... -t DIRECTORY SRCFILES... - or: $0 [OPTION]... -d DIRECTORIES... - -In the 1st form, copy SRCFILE to DSTFILE. -In the 2nd and 3rd, copy all SRCFILES to DIRECTORY. -In the 4th, create DIRECTORIES. - -Options: - --help display this help and exit. - --version display version info and exit. - - -c (ignored) - -C install only if different (preserve the last data modification time) - -d create directories instead of installing files. - -g GROUP $chgrpprog installed files to GROUP. - -m MODE $chmodprog installed files to MODE. - -o USER $chownprog installed files to USER. - -s $stripprog installed files. - -t DIRECTORY install into DIRECTORY. - -T report an error if DSTFILE is a directory. 
- -Environment variables override the default commands: - CHGRPPROG CHMODPROG CHOWNPROG CMPPROG CPPROG MKDIRPROG MVPROG - RMPROG STRIPPROG -" - -while test $# -ne 0; do - case $1 in - -c) ;; - - -C) copy_on_change=true;; - - -d) dir_arg=true;; - - -g) chgrpcmd="$chgrpprog $2" - shift;; - - --help) echo "$usage"; exit $?;; - - -m) mode=$2 - case $mode in - *' '* | *' '* | *' -'* | *'*'* | *'?'* | *'['*) - echo "$0: invalid mode: $mode" >&2 - exit 1;; - esac - shift;; - - -o) chowncmd="$chownprog $2" - shift;; - - -s) stripcmd=$stripprog;; - - -t) dst_arg=$2 - shift;; - - -T) no_target_directory=true;; - - --version) echo "$0 $scriptversion"; exit $?;; - - --) shift - break;; - - -*) echo "$0: invalid option: $1" >&2 - exit 1;; - - *) break;; - esac - shift -done - -if test $# -ne 0 && test -z "$dir_arg$dst_arg"; then - # When -d is used, all remaining arguments are directories to create. - # When -t is used, the destination is already specified. - # Otherwise, the last argument is the destination. Remove it from $@. - for arg - do - if test -n "$dst_arg"; then - # $@ is not empty: it contains at least $arg. - set fnord "$@" "$dst_arg" - shift # fnord - fi - shift # arg - dst_arg=$arg - done -fi - -if test $# -eq 0; then - if test -z "$dir_arg"; then - echo "$0: no input file specified." >&2 - exit 1 - fi - # It's OK to call `install-sh -d' without argument. - # This can happen when creating conditional directories. - exit 0 -fi - -if test -z "$dir_arg"; then - trap '(exit $?); exit' 1 2 13 15 - - # Set umask so as not to create temps with too-generous modes. - # However, 'strip' requires both read and write access to temps. - case $mode in - # Optimize common cases. 
- *644) cp_umask=133;; - *755) cp_umask=22;; - - *[0-7]) - if test -z "$stripcmd"; then - u_plus_rw= - else - u_plus_rw='% 200' - fi - cp_umask=`expr '(' 777 - $mode % 1000 ')' $u_plus_rw`;; - *) - if test -z "$stripcmd"; then - u_plus_rw= - else - u_plus_rw=,u+rw - fi - cp_umask=$mode$u_plus_rw;; - esac -fi - -for src -do - # Protect names starting with `-'. - case $src in - -*) src=./$src;; - esac - - if test -n "$dir_arg"; then - dst=$src - dstdir=$dst - test -d "$dstdir" - dstdir_status=$? - else - - # Waiting for this to be detected by the "$cpprog $src $dsttmp" command - # might cause directories to be created, which would be especially bad - # if $src (and thus $dsttmp) contains '*'. - if test ! -f "$src" && test ! -d "$src"; then - echo "$0: $src does not exist." >&2 - exit 1 - fi - - if test -z "$dst_arg"; then - echo "$0: no destination specified." >&2 - exit 1 - fi - - dst=$dst_arg - # Protect names starting with `-'. - case $dst in - -*) dst=./$dst;; - esac - - # If destination is a directory, append the input filename; won't work - # if double slashes aren't ignored. - if test -d "$dst"; then - if test -n "$no_target_directory"; then - echo "$0: $dst_arg: Is a directory" >&2 - exit 1 - fi - dstdir=$dst - dst=$dstdir/`basename "$src"` - dstdir_status=0 - else - # Prefer dirname, but fall back on a substitute if dirname fails. - dstdir=` - (dirname "$dst") 2>/dev/null || - expr X"$dst" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ - X"$dst" : 'X\(//\)[^/]' \| \ - X"$dst" : 'X\(//\)$' \| \ - X"$dst" : 'X\(/\)' \| . 2>/dev/null || - echo X"$dst" | - sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ - s//\1/ - q - } - /^X\(\/\/\)[^/].*/{ - s//\1/ - q - } - /^X\(\/\/\)$/{ - s//\1/ - q - } - /^X\(\/\).*/{ - s//\1/ - q - } - s/.*/./; q' - ` - - test -d "$dstdir" - dstdir_status=$? - fi - fi - - obsolete_mkdir_used=false - - if test $dstdir_status != 0; then - case $posix_mkdir in - '') - # Create intermediate dirs using mode 755 as modified by the umask. 
- # This is like FreeBSD 'install' as of 1997-10-28. - umask=`umask` - case $stripcmd.$umask in - # Optimize common cases. - *[2367][2367]) mkdir_umask=$umask;; - .*0[02][02] | .[02][02] | .[02]) mkdir_umask=22;; - - *[0-7]) - mkdir_umask=`expr $umask + 22 \ - - $umask % 100 % 40 + $umask % 20 \ - - $umask % 10 % 4 + $umask % 2 - `;; - *) mkdir_umask=$umask,go-w;; - esac - - # With -d, create the new directory with the user-specified mode. - # Otherwise, rely on $mkdir_umask. - if test -n "$dir_arg"; then - mkdir_mode=-m$mode - else - mkdir_mode= - fi - - posix_mkdir=false - case $umask in - *[123567][0-7][0-7]) - # POSIX mkdir -p sets u+wx bits regardless of umask, which - # is incompatible with FreeBSD 'install' when (umask & 300) != 0. - ;; - *) - tmpdir=${TMPDIR-/tmp}/ins$RANDOM-$$ - trap 'ret=$?; rmdir "$tmpdir/d" "$tmpdir" 2>/dev/null; exit $ret' 0 - - if (umask $mkdir_umask && - exec $mkdirprog $mkdir_mode -p -- "$tmpdir/d") >/dev/null 2>&1 - then - if test -z "$dir_arg" || { - # Check for POSIX incompatibilities with -m. - # HP-UX 11.23 and IRIX 6.5 mkdir -m -p sets group- or - # other-writeable bit of parent directory when it shouldn't. - # FreeBSD 6.1 mkdir -m -p sets mode of existing directory. - ls_ld_tmpdir=`ls -ld "$tmpdir"` - case $ls_ld_tmpdir in - d????-?r-*) different_mode=700;; - d????-?--*) different_mode=755;; - *) false;; - esac && - $mkdirprog -m$different_mode -p -- "$tmpdir" && { - ls_ld_tmpdir_1=`ls -ld "$tmpdir"` - test "$ls_ld_tmpdir" = "$ls_ld_tmpdir_1" - } - } - then posix_mkdir=: - fi - rmdir "$tmpdir/d" "$tmpdir" - else - # Remove any dirs left behind by ancient mkdir implementations. - rmdir ./$mkdir_mode ./-p ./-- 2>/dev/null - fi - trap '' 0;; - esac;; - esac - - if - $posix_mkdir && ( - umask $mkdir_umask && - $doit_exec $mkdirprog $mkdir_mode -p -- "$dstdir" - ) - then : - else - - # The umask is ridiculous, or mkdir does not conform to POSIX, - # or it failed possibly due to a race condition. 
Create the - # directory the slow way, step by step, checking for races as we go. - - case $dstdir in - /*) prefix='/';; - -*) prefix='./';; - *) prefix='';; - esac - - eval "$initialize_posix_glob" - - oIFS=$IFS - IFS=/ - $posix_glob set -f - set fnord $dstdir - shift - $posix_glob set +f - IFS=$oIFS - - prefixes= - - for d - do - test -z "$d" && continue - - prefix=$prefix$d - if test -d "$prefix"; then - prefixes= - else - if $posix_mkdir; then - (umask=$mkdir_umask && - $doit_exec $mkdirprog $mkdir_mode -p -- "$dstdir") && break - # Don't fail if two instances are running concurrently. - test -d "$prefix" || exit 1 - else - case $prefix in - *\'*) qprefix=`echo "$prefix" | sed "s/'/'\\\\\\\\''/g"`;; - *) qprefix=$prefix;; - esac - prefixes="$prefixes '$qprefix'" - fi - fi - prefix=$prefix/ - done - - if test -n "$prefixes"; then - # Don't fail if two instances are running concurrently. - (umask $mkdir_umask && - eval "\$doit_exec \$mkdirprog $prefixes") || - test -d "$dstdir" || exit 1 - obsolete_mkdir_used=true - fi - fi - fi - - if test -n "$dir_arg"; then - { test -z "$chowncmd" || $doit $chowncmd "$dst"; } && - { test -z "$chgrpcmd" || $doit $chgrpcmd "$dst"; } && - { test "$obsolete_mkdir_used$chowncmd$chgrpcmd" = false || - test -z "$chmodcmd" || $doit $chmodcmd $mode "$dst"; } || exit 1 - else - - # Make a couple of temp file names in the proper directory. - dsttmp=$dstdir/_inst.$$_ - rmtmp=$dstdir/_rm.$$_ - - # Trap to clean up those temp files at exit. - trap 'ret=$?; rm -f "$dsttmp" "$rmtmp" && exit $ret' 0 - - # Copy the file name to the temp name. - (umask $cp_umask && $doit_exec $cpprog "$src" "$dsttmp") && - - # and set any options; do chmod last to preserve setuid bits. - # - # If any of these fail, we abort the whole thing. If we want to - # ignore errors from any of these, just make sure not to ignore - # errors from the above "$doit $cpprog $src $dsttmp" command. 
- # - { test -z "$chowncmd" || $doit $chowncmd "$dsttmp"; } && - { test -z "$chgrpcmd" || $doit $chgrpcmd "$dsttmp"; } && - { test -z "$stripcmd" || $doit $stripcmd "$dsttmp"; } && - { test -z "$chmodcmd" || $doit $chmodcmd $mode "$dsttmp"; } && - - # If -C, don't bother to copy if it wouldn't change the file. - if $copy_on_change && - old=`LC_ALL=C ls -dlL "$dst" 2>/dev/null` && - new=`LC_ALL=C ls -dlL "$dsttmp" 2>/dev/null` && - - eval "$initialize_posix_glob" && - $posix_glob set -f && - set X $old && old=:$2:$4:$5:$6 && - set X $new && new=:$2:$4:$5:$6 && - $posix_glob set +f && - - test "$old" = "$new" && - $cmpprog "$dst" "$dsttmp" >/dev/null 2>&1 - then - rm -f "$dsttmp" - else - # Rename the file to the real destination. - $doit $mvcmd -f "$dsttmp" "$dst" 2>/dev/null || - - # The rename failed, perhaps because mv can't rename something else - # to itself, or perhaps because mv is so ancient that it does not - # support -f. - { - # Now remove or move aside any old file at destination location. - # We try this two ways since rm can't unlink itself on some - # systems and the destination file might be busy for other - # reasons. In this case, the final cleanup might fail but the new - # file should still install successfully. - { - test ! -f "$dst" || - $doit $rmcmd -f "$dst" 2>/dev/null || - { $doit $mvcmd -f "$dst" "$rmtmp" 2>/dev/null && - { $doit $rmcmd -f "$rmtmp" 2>/dev/null; :; } - } || - { echo "$0: cannot unlink or rename $dst" >&2 - (exit 1); exit 1 - } - } && - - # Now rename the file to the real destination. 
- $doit $mvcmd "$dsttmp" "$dst" - } - fi || exit 1 - - trap '' 0 - fi -done - -# Local variables: -# eval: (add-hook 'write-file-hooks 'time-stamp) -# time-stamp-start: "scriptversion=" -# time-stamp-format: "%:y-%02m-%02d.%02H" -# time-stamp-time-zone: "UTC" -# time-stamp-end: "; # UTC" -# End: diff --git a/libpsf-uninstalled.pc.in b/libpsf-uninstalled.pc.in deleted file mode 100644 index e4cba00..0000000 --- a/libpsf-uninstalled.pc.in +++ /dev/null @@ -1,12 +0,0 @@ -prefix= -exec_prefix= -libdir=src -includedir=include - -Name: @PACKAGE@ -Description: C++ PSF waveform reader library -Version: @VERSION@ -Requires: -Conflicts: -Libs: ${pcfiledir}/${libdir}/libpsf.la -Cflags: -I${pcfiledir}/${includedir} diff --git a/libpsf.pc.in b/libpsf.pc.in deleted file mode 100644 index 29c6e61..0000000 --- a/libpsf.pc.in +++ /dev/null @@ -1,12 +0,0 @@ -prefix=@prefix@ -exec_prefix=@prefix@ -libdir=@exec_prefix@/lib -includedir=@prefix@/include - -Name: @PACKAGE@ -Description: C++ PSF waveform reader library -Version: @VERSION@ -Requires: -Conflicts: -Libs: -L${libdir} -lpsf -Cflags: -I${includedir} diff --git a/m4/ax_boost_base.m4 b/m4/ax_boost_base.m4 deleted file mode 100644 index 5894d0c..0000000 --- a/m4/ax_boost_base.m4 +++ /dev/null @@ -1,207 +0,0 @@ -##### http://autoconf-archive.cryp.to/ax_boost_base.html -# -# SYNOPSIS -# -# AX_BOOST_BASE([MINIMUM-VERSION]) -# -# DESCRIPTION -# -# Test for the Boost C++ libraries of a particular version (or newer) -# -# If no path to the installed boost library is given the macro -# searchs under /usr, /usr/local, /opt and /opt/local and evaluates -# the $BOOST_ROOT environment variable. Further documentation is -# available at . 
-# -# This macro calls: -# -# AC_SUBST(BOOST_CPPFLAGS) / AC_SUBST(BOOST_LDFLAGS) -# -# And sets: -# -# HAVE_BOOST -# -# LAST MODIFICATION -# -# 2007-07-28 -# Modified for use in Thrift -# -# COPYLEFT -# -# Copyright (c) 2007 Thomas Porschberg -# -# Copying and distribution of this file, with or without -# modification, are permitted in any medium without royalty provided -# the copyright notice and this notice are preserved. - -AC_DEFUN([AX_BOOST_BASE], -[ -AC_ARG_WITH([boost], - AS_HELP_STRING([--with-boost@<:@=DIR@:>@], [use boost (default is yes) - it is possible to specify the root directory for boost (optional)]), - [ - if test "$withval" = "no"; then - want_boost="no" - elif test "$withval" = "yes"; then - want_boost="yes" - ac_boost_path="" - else - want_boost="yes" - ac_boost_path="$withval" - fi - ], - [want_boost="yes"]) - -if test "x$want_boost" = "xyes"; then - boost_lib_version_req=ifelse([$1], ,1.20.0,$1) - boost_lib_version_req_shorten=`expr $boost_lib_version_req : '\([[0-9]]*\.[[0-9]]*\)'` - boost_lib_version_req_major=`expr $boost_lib_version_req : '\([[0-9]]*\)'` - boost_lib_version_req_minor=`expr $boost_lib_version_req : '[[0-9]]*\.\([[0-9]]*\)'` - boost_lib_version_req_sub_minor=`expr $boost_lib_version_req : '[[0-9]]*\.[[0-9]]*\.\([[0-9]]*\)'` - if test "x$boost_lib_version_req_sub_minor" = "x" ; then - boost_lib_version_req_sub_minor="0" - fi - WANT_BOOST_VERSION=`expr $boost_lib_version_req_major \* 100000 \+ $boost_lib_version_req_minor \* 100 \+ $boost_lib_version_req_sub_minor` - AC_MSG_CHECKING(for boostlib >= $boost_lib_version_req) - succeeded=no - - dnl first we check the system location for boost libraries - dnl this location ist chosen if boost libraries are installed with the --layout=system option - dnl or if you install boost with RPM - if test "$ac_boost_path" != ""; then - BOOST_LDFLAGS="-L$ac_boost_path/lib" - BOOST_CPPFLAGS="-I$ac_boost_path/include" - BOOST_ROOT_PATH="$ac_boost_path" - else - for ac_boost_path_tmp in /usr 
/usr/local /opt /opt/local ; do - if test -d "$ac_boost_path_tmp/include/boost" && test -r "$ac_boost_path_tmp/include/boost"; then - BOOST_LDFLAGS="-L$ac_boost_path_tmp/lib" - BOOST_CPPFLAGS="-I$ac_boost_path_tmp/include" - BOOST_ROOT_PATH="$ac_boost_path_tmp" - break; - fi - done - fi - - CPPFLAGS_SAVED="$CPPFLAGS" - CPPFLAGS="$CPPFLAGS $BOOST_CPPFLAGS" - export CPPFLAGS - - LDFLAGS_SAVED="$LDFLAGS" - LDFLAGS="$LDFLAGS $BOOST_LDFLAGS" - export LDFLAGS - - export BOOST_ROOT_PATH - - AC_LANG_PUSH(C++) - AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[ - @%:@include - ]], [[ - #if BOOST_VERSION >= $WANT_BOOST_VERSION - // Everything is okay - #else - # error Boost version is too old - #endif - ]])],[ - AC_MSG_RESULT(yes) - succeeded=yes - found_system=yes - ],[ - ]) - AC_LANG_POP([C++]) - - - - dnl if we found no boost with system layout we search for boost libraries - dnl built and installed without the --layout=system option or for a staged(not installed) version - if test "x$succeeded" != "xyes"; then - _version=0 - if test "$ac_boost_path" != ""; then - BOOST_LDFLAGS="-L$ac_boost_path/lib" - if test -d "$ac_boost_path" && test -r "$ac_boost_path"; then - for i in `ls -d $ac_boost_path/include/boost-* 2>/dev/null`; do - _version_tmp=`echo $i | sed "s#$ac_boost_path##" | sed 's/\/include\/boost-//' | sed 's/_/./'` - V_CHECK=`expr $_version_tmp \> $_version` - if test "$V_CHECK" = "1" ; then - _version=$_version_tmp - fi - VERSION_UNDERSCORE=`echo $_version | sed 's/\./_/'` - BOOST_CPPFLAGS="-I$ac_boost_path/include/boost-$VERSION_UNDERSCORE" - done - fi - else - for ac_boost_path in /usr /usr/local /opt /opt/local ; do - if test -d "$ac_boost_path" && test -r "$ac_boost_path"; then - for i in `ls -d $ac_boost_path/include/boost-* 2>/dev/null`; do - _version_tmp=`echo $i | sed "s#$ac_boost_path##" | sed 's/\/include\/boost-//' | sed 's/_/./'` - V_CHECK=`expr $_version_tmp \> $_version` - if test "$V_CHECK" = "1" ; then - _version=$_version_tmp - best_path=$ac_boost_path - fi 
- done - fi - done - - VERSION_UNDERSCORE=`echo $_version | sed 's/\./_/'` - BOOST_CPPFLAGS="-I$best_path/include/boost-$VERSION_UNDERSCORE" - BOOST_LDFLAGS="-L$best_path/lib" - BOOST_ROOT_PATH="$best_path" - - if test "x$BOOST_ROOT" != "x"; then - if test -d "$BOOST_ROOT" && test -r "$BOOST_ROOT" && test -d "$BOOST_ROOT/stage/lib" && test -r "$BOOST_ROOT/stage/lib"; then - version_dir=`expr //$BOOST_ROOT : '.*/\(.*\)'` - stage_version=`echo $version_dir | sed 's/boost_//' | sed 's/_/./g'` - stage_version_shorten=`expr $stage_version : '\([[0-9]]*\.[[0-9]]*\)'` - V_CHECK=`expr $stage_version_shorten \>\= $_version` - if test "$V_CHECK" = "1" ; then - AC_MSG_NOTICE(We will use a staged boost library from $BOOST_ROOT) - BOOST_CPPFLAGS="-I$BOOST_ROOT" - BOOST_LDFLAGS="-L$BOOST_ROOT/stage/lib" - BOOST_ROOT_PATH="$BOOST_ROOT" - fi - fi - fi - fi - - CPPFLAGS="$CPPFLAGS $BOOST_CPPFLAGS" - export CPPFLAGS - LDFLAGS="$LDFLAGS $BOOST_LDFLAGS" - export LDFLAGS - export BOOST_ROOT_PATH - - AC_LANG_PUSH(C++) - AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[ - @%:@include - ]], [[ - #if BOOST_VERSION >= $WANT_BOOST_VERSION - // Everything is okay - #else - # error Boost version is too old - #endif - ]])],[ - AC_MSG_RESULT(yes) - succeeded=yes - found_system=yes - ],[ - ]) - AC_LANG_POP([C++]) - fi - - if test "$succeeded" != "yes" ; then - if test "$_version" = "0" ; then - AC_MSG_WARN([[We could not detect the boost libraries (version $boost_lib_version_req_shorten or higher). If you have a staged boost library (still not installed) please specify \$BOOST_ROOT in your environment and do not give a PATH to --with-boost option. If you are sure you have boost installed, then check your version number looking in . 
See http://randspringer.de/boost for more documentation.]]) - else - AC_MSG_WARN([Your boost libraries seems to old (version $_version).]) - fi - else - AC_SUBST(BOOST_CPPFLAGS) - AC_SUBST(BOOST_LDFLAGS) - AC_SUBST(BOOST_ROOT_PATH) - AC_DEFINE(HAVE_BOOST,,[define if the Boost library is available]) - fi - - CPPFLAGS="$CPPFLAGS_SAVED" - LDFLAGS="$LDFLAGS_SAVED" -fi - -]) diff --git a/m4/ax_boost_python.m4 b/m4/ax_boost_python.m4 deleted file mode 100644 index bc1152e..0000000 --- a/m4/ax_boost_python.m4 +++ /dev/null @@ -1,89 +0,0 @@ -# =========================================================================== -# http://autoconf-archive.cryp.to/ax_boost_python.html -# =========================================================================== -# -# SYNOPSIS -# -# AX_BOOST_PYTHON -# -# DESCRIPTION -# -# This macro checks to see if the Boost.Python library is installed. It -# also attempts to guess the currect library name using several attempts. -# It tries to build the library name using a user supplied name or suffix -# and then just the raw library. -# -# If the library is found, HAVE_BOOST_PYTHON is defined and -# BOOST_PYTHON_LIB is set to the name of the library. -# -# This macro calls AC_SUBST(BOOST_PYTHON_LIB). -# -# In order to ensure that the Python headers are specified on the include -# path, this macro requires AX_PYTHON to be called. -# -# LAST MODIFICATION -# -# 2008-04-12 -# -# COPYLEFT -# -# Copyright (c) 2008 Michael Tindal -# -# This program is free software; you can redistribute it and/or modify it -# under the terms of the GNU General Public License as published by the -# Free Software Foundation; either version 2 of the License, or (at your -# option) any later version. -# -# This program is distributed in the hope that it will be useful, but -# WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -# Public License for more details. 
-# -# You should have received a copy of the GNU General Public License along -# with this program. If not, see . -# -# As a special exception, the respective Autoconf Macro's copyright owner -# gives unlimited permission to copy, distribute and modify the configure -# scripts that are the output of Autoconf when processing the Macro. You -# need not follow the terms of the GNU General Public License when using -# or distributing such scripts, even though portions of the text of the -# Macro appear in them. The GNU General Public License (GPL) does govern -# all other use of the material that constitutes the Autoconf Macro. -# -# This special exception to the GPL applies to versions of the Autoconf -# Macro released by the Autoconf Macro Archive. When you make and -# distribute a modified version of the Autoconf Macro, you may extend this -# special exception to the GPL to apply to your modified version as well. - -AC_DEFUN([AX_BOOST_PYTHON], -[AC_REQUIRE([AX_PYTHON_DEVEL])dnl -AC_CACHE_CHECK(whether the Boost::Python library is available, -ac_cv_boost_python, -[AC_LANG_SAVE - AC_LANG_CPLUSPLUS - CPPFLAGS_SAVE=$CPPFLAGS - CPPFLAGS="$PYTHON_CPPFLAGS $BOOST_CPPFLAGS $CPPFLAGS" - - AC_COMPILE_IFELSE(AC_LANG_PROGRAM([[ - #include - using namespace boost::python; - BOOST_PYTHON_MODULE(test) { throw "Boost::Python test."; }]], - [[return 0;]]), - ac_cv_boost_python=yes, ac_cv_boost_python=no) - AC_LANG_RESTORE - CPPFLAGS="$CPPFLAGS_SAVE" -]) -if test "$ac_cv_boost_python" = "yes"; then - AC_DEFINE(HAVE_BOOST_PYTHON,,[define if the Boost::Python library is available]) - ax_python_lib=boost_python - AC_ARG_WITH([boost-python],AS_HELP_STRING([--with-boost-python],[specify the boost python library or suffix to use]), - [if test "x$with_boost_python" != "xno"; then - ax_python_lib=$with_boost_python - ax_boost_python_lib=boost_python-$with_boost_python - fi]) - for ax_lib in $ax_boost_python_lib boost_python boost_python-mt boost_python-mt-py2.5 boost_python-mt-py2.6; do - 
AC_CHECK_LIB($ax_lib, exit, [BOOST_PYTHON_LIB=$ax_lib break], [], [$PYTHON_LDFLAGS]) - done - AC_SUBST(BOOST_PYTHON_LIB) -fi -])dnl diff --git a/m4/ax_python.m4 b/m4/ax_python.m4 deleted file mode 100644 index 18216f9..0000000 --- a/m4/ax_python.m4 +++ /dev/null @@ -1,110 +0,0 @@ -# =========================================================================== -# http://autoconf-archive.cryp.to/ax_python.html -# =========================================================================== -# -# SYNOPSIS -# -# AX_PYTHON -# -# DESCRIPTION -# -# This macro does a complete Python development environment check. -# -# It recurses through several python versions (from 2.1 to 2.5 in this -# version), looking for an executable. When it finds an executable, it -# looks to find the header files and library. -# -# It sets PYTHON_BIN to the name of the python executable, -# PYTHON_INCLUDE_DIR to the directory holding the header files, and -# PYTHON_LIB to the name of the Python library. -# -# This macro calls AC_SUBST on PYTHON_BIN (via AC_CHECK_PROG), -# PYTHON_INCLUDE_DIR and PYTHON_LIB. -# -# LAST MODIFICATION -# -# 2008-04-12 -# -# COPYLEFT -# -# Copyright (c) 2008 Michael Tindal -# -# This program is free software; you can redistribute it and/or modify it -# under the terms of the GNU General Public License as published by the -# Free Software Foundation; either version 2 of the License, or (at your -# option) any later version. -# -# This program is distributed in the hope that it will be useful, but -# WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -# Public License for more details. -# -# You should have received a copy of the GNU General Public License along -# with this program. If not, see . 
-# -# As a special exception, the respective Autoconf Macro's copyright owner -# gives unlimited permission to copy, distribute and modify the configure -# scripts that are the output of Autoconf when processing the Macro. You -# need not follow the terms of the GNU General Public License when using -# or distributing such scripts, even though portions of the text of the -# Macro appear in them. The GNU General Public License (GPL) does govern -# all other use of the material that constitutes the Autoconf Macro. -# -# This special exception to the GPL applies to versions of the Autoconf -# Macro released by the Autoconf Macro Archive. When you make and -# distribute a modified version of the Autoconf Macro, you may extend this -# special exception to the GPL to apply to your modified version as well. - -AC_DEFUN([AX_PYTHON], -[AC_MSG_CHECKING(for python build information) -AC_MSG_RESULT([]) - -AC_ARG_VAR(PYTHONBINARY, [Python binary]) - -if test x$PYTHONBINARY == x; then - PYTHONBINARY=python -fi - -AC_PATH_PROG([ax_python_bin], $PYTHONBINARY) - -ax_python_bin=$PYTHONBINARY - -if test x$ax_python_bin != x; then - python_inc_dir=`$ax_python_bin -c "import distutils.sysconfig; print(distutils.sysconfig.get_python_inc())"` - python_lib_dir=`$ax_python_bin -c "import distutils.sysconfig; print(distutils.sysconfig.get_config_vars('LIBDIR')[[0]])"` - python_library=`$ax_python_bin -c "import re, distutils.sysconfig; print(re.match(\"lib(.+)\.so.*\",distutils.sysconfig.get_config_vars('LDLIBRARY')[[0]]).groups(0)[[0]])"` - - AC_CHECK_LIB([$python_library], main, ax_python_lib=$python_library, ax_python_lib=no) - AC_CHECK_HEADER([$python_inc_dir/Python.h], ax_python_header=$python_inc_dir, ax_python_header=no) - - AC_MSG_RESULT([python_bin: $python_bin]) - AC_MSG_RESULT([python_inc_dir: $python_inc_dir]) - AC_MSG_RESULT([python_lib_dir: $python_lib_dir]) - AC_MSG_RESULT([python_library: $python_library]) -fi - - -if test x$ax_python_bin = x; then - ax_python_bin=no -fi -if 
test x$ax_python_header = x; then - ax_python_header=no -fi -if test x$ax_python_lib = x; then - ax_python_lib=no -fi - -AC_MSG_RESULT([ results of the Python check:]) -AC_MSG_RESULT([ Binary: $ax_python_bin]) -AC_MSG_RESULT([ Library: $ax_python_lib]) -AC_MSG_RESULT([ Include Dir: $ax_python_header]) - -if test x$ax_python_header != xno; then - PYTHON_INCLUDE_DIR=$ax_python_header - AC_SUBST(PYTHON_INCLUDE_DIR) -fi -if test x$ax_python_lib != xno; then - PYTHON_LIB=$ax_python_lib - AC_SUBST(PYTHON_LIB) -fi -])dnl diff --git a/m4/ax_python_devel.m4 b/m4/ax_python_devel.m4 deleted file mode 100644 index a62b860..0000000 --- a/m4/ax_python_devel.m4 +++ /dev/null @@ -1,325 +0,0 @@ -# =========================================================================== -# http://www.gnu.org/software/autoconf-archive/ax_python_devel.html -# =========================================================================== -# -# SYNOPSIS -# -# AX_PYTHON_DEVEL([version]) -# -# DESCRIPTION -# -# Note: Defines as a precious variable "PYTHON_VERSION". Don't override it -# in your configure.ac. -# -# This macro checks for Python and tries to get the include path to -# 'Python.h'. It provides the $(PYTHON_CPPFLAGS) and $(PYTHON_LDFLAGS) -# output variables. It also exports $(PYTHON_EXTRA_LIBS) and -# $(PYTHON_EXTRA_LDFLAGS) for embedding Python in your code. -# -# You can search for some particular version of Python by passing a -# parameter to this macro, for example ">= '2.3.1'", or "== '2.4'". Please -# note that you *have* to pass also an operator along with the version to -# match, and pay special attention to the single quotes surrounding the -# version number. Don't use "PYTHON_VERSION" for this: that environment -# variable is declared as precious and thus reserved for the end-user. -# -# This macro should work for all versions of Python >= 2.1.0. 
As an end -# user, you can disable the check for the python version by setting the -# PYTHON_NOVERSIONCHECK environment variable to something else than the -# empty string. -# -# If you need to use this macro for an older Python version, please -# contact the authors. We're always open for feedback. -# -# LICENSE -# -# Copyright (c) 2009 Sebastian Huber -# Copyright (c) 2009 Alan W. Irwin -# Copyright (c) 2009 Rafael Laboissiere -# Copyright (c) 2009 Andrew Collier -# Copyright (c) 2009 Matteo Settenvini -# Copyright (c) 2009 Horst Knorr -# -# This program is free software: you can redistribute it and/or modify it -# under the terms of the GNU General Public License as published by the -# Free Software Foundation, either version 3 of the License, or (at your -# option) any later version. -# -# This program is distributed in the hope that it will be useful, but -# WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -# Public License for more details. -# -# You should have received a copy of the GNU General Public License along -# with this program. If not, see . -# -# As a special exception, the respective Autoconf Macro's copyright owner -# gives unlimited permission to copy, distribute and modify the configure -# scripts that are the output of Autoconf when processing the Macro. You -# need not follow the terms of the GNU General Public License when using -# or distributing such scripts, even though portions of the text of the -# Macro appear in them. The GNU General Public License (GPL) does govern -# all other use of the material that constitutes the Autoconf Macro. -# -# This special exception to the GPL applies to versions of the Autoconf -# Macro released by the Autoconf Archive. When you make and distribute a -# modified version of the Autoconf Macro, you may extend this special -# exception to the GPL to apply to your modified version as well. 
- -#serial 8 - -AU_ALIAS([AC_PYTHON_DEVEL], [AX_PYTHON_DEVEL]) -AC_DEFUN([AX_PYTHON_DEVEL],[ - # - # Allow the use of a (user set) custom python version - # - AC_ARG_VAR([PYTHON_VERSION],[The installed Python - version to use, for example '2.3'. This string - will be appended to the Python interpreter - canonical name.]) - - AC_PATH_PROG([PYTHON],[python[$PYTHON_VERSION]]) - if test -z "$PYTHON"; then - AC_MSG_ERROR([Cannot find python$PYTHON_VERSION in your system path]) - PYTHON_VERSION="" - fi - - # - # Check for a version of Python >= 2.1.0 - # - AC_MSG_CHECKING([for a version of Python >= '2.1.0']) - ac_supports_python_ver=`$PYTHON -c "import sys; \ - ver = sys.version.split ()[[0]]; \ - print (ver >= '2.1.0')"` - if test "$ac_supports_python_ver" != "True"; then - if test -z "$PYTHON_NOVERSIONCHECK"; then - AC_MSG_RESULT([no]) - AC_MSG_FAILURE([ -This version of the AC@&t@_PYTHON_DEVEL macro -doesn't work properly with versions of Python before -2.1.0. You may need to re-run configure, setting the -variables PYTHON_CPPFLAGS, PYTHON_LDFLAGS, PYTHON_SITE_PKG, -PYTHON_EXTRA_LIBS and PYTHON_EXTRA_LDFLAGS by hand. -Moreover, to disable this check, set PYTHON_NOVERSIONCHECK -to something else than an empty string. -]) - else - AC_MSG_RESULT([skip at user request]) - fi - else - AC_MSG_RESULT([yes]) - fi - - # - # if the macro parameter ``version'' is set, honour it - # - if test -n "$1"; then - AC_MSG_CHECKING([for a version of Python $1]) - ac_supports_python_ver=`$PYTHON -c "import sys; \ - ver = sys.version.split ()[[0]]; \ - print (ver $1)"` - if test "$ac_supports_python_ver" = "True"; then - AC_MSG_RESULT([yes]) - else - AC_MSG_RESULT([no]) - AC_MSG_ERROR([this package requires Python $1. -If you have it installed, but it isn't the default Python -interpreter in your system path, please pass the PYTHON_VERSION -variable to configure. See ``configure --help'' for reference. 
-]) - PYTHON_VERSION="" - fi - fi - - # - # Check if you have distutils, else fail - # - AC_MSG_CHECKING([for the distutils Python package]) - ac_distutils_result=`$PYTHON -c "import distutils" 2>&1` - if test -z "$ac_distutils_result"; then - AC_MSG_RESULT([yes]) - else - AC_MSG_RESULT([no]) - AC_MSG_ERROR([cannot import Python module "distutils". -Please check your Python installation. The error was: -$ac_distutils_result]) - PYTHON_VERSION="" - fi - - # - # Check for Python include path - # - AC_MSG_CHECKING([for Python include path]) - if test -z "$PYTHON_CPPFLAGS"; then - python_path=`$PYTHON -c "import distutils.sysconfig; \ - print (distutils.sysconfig.get_python_inc ());"` - if test -n "${python_path}"; then - python_path="-I$python_path" - fi - PYTHON_CPPFLAGS=$python_path - fi - AC_MSG_RESULT([$PYTHON_CPPFLAGS]) - AC_SUBST([PYTHON_CPPFLAGS]) - - # - # Check for Python library path - # - AC_MSG_CHECKING([for Python library path]) - if test -z "$PYTHON_LDFLAGS"; then - # (makes two attempts to ensure we've got a version number - # from the interpreter) - ac_python_version=`cat<]], - [[Py_Initialize();]]) - ],[pythonexists=yes],[pythonexists=no]) - AC_LANG_POP([C]) - # turn back to default flags - CPPFLAGS="$ac_save_CPPFLAGS" - LIBS="$ac_save_LIBS" - - AC_MSG_RESULT([$pythonexists]) - - if test ! "x$pythonexists" = "xyes"; then - AC_MSG_FAILURE([ - Could not link test program to Python. Maybe the main Python library has been - installed in some non-standard library path. If so, pass it to configure, - via the LDFLAGS environment variable. - Example: ./configure LDFLAGS="-L/usr/non-standard-path/python/lib" - ============================================================================ - ERROR! - You probably have to install the development version of the Python package - for your distribution. The exact name of this package varies among them. 
- ============================================================================ - ]) - PYTHON_VERSION="" - fi - - # - # all done! - # -]) diff --git a/m4/ax_python_module.m4 b/m4/ax_python_module.m4 deleted file mode 100644 index bd70a06..0000000 --- a/m4/ax_python_module.m4 +++ /dev/null @@ -1,49 +0,0 @@ -# =========================================================================== -# http://www.gnu.org/software/autoconf-archive/ax_python_module.html -# =========================================================================== -# -# SYNOPSIS -# -# AX_PYTHON_MODULE(modname[, fatal]) -# -# DESCRIPTION -# -# Checks for Python module. -# -# If fatal is non-empty then absence of a module will trigger an error. -# -# LICENSE -# -# Copyright (c) 2008 Andrew Collier -# -# Copying and distribution of this file, with or without modification, are -# permitted in any medium without royalty provided the copyright notice -# and this notice are preserved. This file is offered as-is, without any -# warranty. - -#serial 5 - -AU_ALIAS([AC_PYTHON_MODULE], [AX_PYTHON_MODULE]) -AC_DEFUN([AX_PYTHON_MODULE],[ - if test -z $PYTHON; - then - PYTHON="python" - fi - PYTHON_NAME=`basename $PYTHON` - AC_MSG_CHECKING($PYTHON_NAME module: $1) - $PYTHON -c "import $1" 2>/dev/null - if test $? -eq 0; - then - AC_MSG_RESULT(yes) - eval AS_TR_CPP(HAVE_PYMOD_$1)=yes - else - AC_MSG_RESULT(no) - eval AS_TR_CPP(HAVE_PYMOD_$1)=no - # - if test -n "$2" - then - AC_MSG_ERROR(failed to find required module $1) - exit 1 - fi - fi -]) diff --git a/spkg-install b/spkg-install deleted file mode 100755 index 8c9a339..0000000 --- a/spkg-install +++ /dev/null @@ -1,16 +0,0 @@ -#!/bin/sh - -if [ "$SPKG_LOCAL" = "" ]; then - echo "SPKG_LOCAL undefined ... exiting"; - echo "Maybe run 'qsnake --shell'?" 
- exit 1 -fi - -set -e - -unset RM -unset MAKEFLAGS - -./autogen.sh --prefix="$SPKG_LOCAL" --with-boost=$SPKG_LOCAL --with-python -make -make install diff --git a/src/CMakeLists.txt b/src/CMakeLists.txt new file mode 100644 index 0000000..069eecd --- /dev/null +++ b/src/CMakeLists.txt @@ -0,0 +1,35 @@ + +set(CMAKE_POSITION_INDEPENDENT_CODE ON) +include(GNUInstallDirs) + +set( BOOST_ROOT $ENV{BOOST_LOC} CACHE PATH "Boost library path" ) +FIND_PACKAGE( Boost COMPONENTS program_options REQUIRED ) + +include_directories(${BOOST_INCLUDE_DIRS}) +#include_directories(${CMAKE_CURRENT_SOURCE_DIR}/../include) + +file(GLOB PSF_SOURCE_FILES "*.cc") +file(GLOB PSF_INCLUDE_FILES "${CMAKE_CURRENT_SOURCE_DIR}/../include/*.h") +set(PSF_INCLUDE "${CMAKE_CURRENT_SOURCE_DIR}/../include") + +# dynamic library +add_library(psf SHARED ${PSF_SOURCE_FILES}) +set_target_properties(psf PROPERTIES VERSION ${PROJECT_VERSION}) +set_target_properties(psf PROPERTIES SOVERSION 1) +set_target_properties(psf PROPERTIES PUBLIC_HEADER "${PSF_INCLUDE_FILES}") +set_target_properties(psf PROPERTIES INSTALL_DIR "${CMAKE_INSTALL_PREFIX}/${CMAKE_INSTALL_LIBDIR}") +target_include_directories(psf PUBLIC ${PSF_INCLUDE}) + +# static library +add_library(psf_static STATIC ${PSF_SOURCE_FILES}) +target_include_directories(psf_static PRIVATE ${PSF_INCLUDE}) + +configure_file(libpsf.pc.in libpsf.pc @ONLY) + + +install(TARGETS psf psf_static + ARCHIVE DESTINATION ${CMAKE_INSTALL_LIBDIR} + LIBRARY DESTINATION ${CMAKE_INSTALL_LIBDIR} + PUBLIC_HEADER DESTINATION ${CMAKE_INSTALL_INCLUDEDIR}) + +install(FILES ${CMAKE_BINARY_DIR}/src/libpsf.pc DESTINATION ${CMAKE_INSTALL_LIBDIR}/pkgconfig) diff --git a/src/Makefile.am b/src/Makefile.am deleted file mode 100644 index 95f5ea1..0000000 --- a/src/Makefile.am +++ /dev/null @@ -1,16 +0,0 @@ -lib_LTLIBRARIES = libpsf.la - -bin_PROGRAMS = psftest -psftest_SOURCES = psftest.cc -psftest_CXXFLAGS = -I../include ${BOOST_CPPFLAGS} -psftest_LDFLAGS = libpsf.la - -libpsf_la_SOURCES = 
psf.cc psfdata.cc psfproperty.cc psfchunk.cc \ - psfcontainer.cc psfindexedcontainer.cc psfgroup.cc psffile.cc \ - psftype.cc psfstruct.cc psfsections.cc psftrace.cc \ - psfnonsweepvalue.cc psfsweepvalue.cc - -libpsf_la_CXXFLAGS = \ - -I../include ${BOOST_CPPFLAGS} - - diff --git a/src/libpsf.pc.in b/src/libpsf.pc.in new file mode 100644 index 0000000..9b15f62 --- /dev/null +++ b/src/libpsf.pc.in @@ -0,0 +1,12 @@ +prefix=@CMAKE_INSTALL_PREFIX@ +exec_prefix=@CMAKE_INSTALL_PREFIX@ +libdir=${exec_prefix}/@CMAKE_INSTALL_LIBDIR@ +includedir=${prefix}/@CMAKE_INSTALL_INCLUDEDIR@ + +Name: @PROJECT_NAME@ +Description: @PROJECT_DESCRIPTION@ +Version: @PROJECT_VERSION@ + +Requires: +Libs: -L${libdir} -lpsf +Cflags: -I${includedir} diff --git a/src/psf.cc b/src/psf.cc index 886bd0e..4b05852 100644 --- a/src/psf.cc +++ b/src/psf.cc @@ -94,6 +94,10 @@ const PSFScalar& PSFDataSet::get_signal_scalar(std::string name) const { const PropertyMap & PSFDataSet::get_signal_properties(std::string name) const { verify_open(); + if (is_swept()){ + throw NotFound(); + } + return m_psf->get_value_properties(name).get_propmap(); } diff --git a/src/psffile.cc b/src/psffile.cc index f0ecf50..85d6aa6 100644 --- a/src/psffile.cc +++ b/src/psffile.cc @@ -2,6 +2,8 @@ #include "psfdata.h" #include "psfinternal.h" +#include + #include #include #include @@ -18,81 +20,142 @@ PSFFile::PSFFile(std::string filename) : } PSFFile::~PSFFile() { - if(m_header) - delete(m_header); - if(m_types) - delete(m_types); - if(m_sweeps) - delete(m_sweeps); - if(m_traces) - delete(m_traces); - if(m_sweepvalues) - delete(m_sweepvalues); - if(m_nonsweepvalues) - delete(m_nonsweepvalues); + if (m_header) + delete (m_header); + if (m_types) + delete (m_types); + if (m_sweeps) + delete (m_sweeps); + if (m_traces) + delete (m_traces); + if (m_sweepvalues) + delete (m_sweepvalues); + if (m_nonsweepvalues) + delete (m_nonsweepvalues); close(); } -void PSFFile::deserialize(const char *buf, int size) { - // Last word contains
the size of the data - uint32_t datasize; - datasize = GET_INT32(buf+size-4); - - // Read section index table +SectionMap PSFFile::load_sections(const char *buf, int size){ + std::vector
sections; + uint32_t section_offset = 4; + + int section_num = 0; + while ( section_offset < size ){ + Section section; + uint32_t section_type = GET_INT32(buf + section_offset); + if ( ! (section_type == HeaderSection::type)) + break; + section.n = section_num; + section.offset = section_offset; + + uint32_t section_end = GET_INT32(buf + section_offset + 4); + section.size = section_end - section_offset; - std::map sections; + sections.push_back(section); + + section_num++; + section_offset = section_end; + } + if (sections.size() < 3){ + throw InvalidFileError(); + } + + m_header = new HeaderSection(); + m_header->deserialize(buf + sections[SECTION_HEADER].offset, + sections[SECTION_HEADER].offset); + + int num_sweep_points = 0; + bool has_sweep = get_header_properties().hasprop("PSF sweep points"); + if (has_sweep) + num_sweep_points = get_header_properties().find("PSF sweep points"); + + if (num_sweep_points == 0) + sections[2].n = SECTION_VALUE; + + SectionMap section_map; + for (auto section: sections) + section_map[section.n] = section; + + return section_map; +} + +SectionMap PSFFile::load_table_of_contents(const char *buf, int size) { + // Last word contains the size of the data + uint32_t datasize; + datasize = GET_INT32(buf + size - 4); int nsections = (size - datasize - 12) / 8; int lastoffset = 0, lastsectionnum = -1; + const char *toc = buf + size - 12 - nsections * 8; - const char *toc = buf + size - 12 - nsections*8; - Section section; - for(int i=0; i < nsections; i++) { - section.n = GET_INT32(toc + 8*i); - section.offset = GET_INT32(toc + 8*i + 4); + SectionMap section_map; + + for (int i = 0; i < nsections; i++) { + Section section; + section.n = GET_INT32(toc + 8 * i); + section.offset = GET_INT32(toc + 8 * i + 4); - if (i>0) - sections[lastsectionnum].size = section.offset - lastoffset; + if (i > 0) + section_map[lastsectionnum].size = section.offset - lastoffset; + + if (i == nsections - 1) + section.size = size - section.offset; - 
sections[section.n] = section; + section_map[section.n] = section; - lastoffset = section.offset; - lastsectionnum = section.n; + lastoffset = section.offset; + lastsectionnum = section.n; } - sections[section.n].size = size - section.offset; m_header = new HeaderSection(); - m_header->deserialize(buf + sections[SECTION_HEADER].offset, sections[SECTION_HEADER].offset); + m_header->deserialize(buf + section_map[SECTION_HEADER].offset, + section_map[SECTION_HEADER].offset); + + return section_map; +} + +void PSFFile::deserialize(const char *buf, int size) { + // Read section index table + SectionMap sections; + if (is_done()) { + sections = load_table_of_contents(buf, size); + } else { + sections = load_sections(buf, size); + } // Read types if (sections.find(SECTION_TYPE) != sections.end()) { - m_types = new TypeSection(); - m_types->deserialize(buf + sections[SECTION_TYPE].offset, sections[SECTION_TYPE].offset); + m_types = new TypeSection(); + m_types->deserialize(buf + sections[SECTION_TYPE].offset, + sections[SECTION_TYPE].offset); } // Read sweeps - if (sections.find(SECTION_SWEEP) != sections.end()) { - m_sweeps = new SweepSection(this); - m_sweeps->deserialize(buf + sections[SECTION_SWEEP].offset, sections[SECTION_SWEEP].offset); + if (sections.find(SECTION_SWEEP) != sections.end()) { + m_sweeps = new SweepSection(this); + m_sweeps->deserialize(buf + sections[SECTION_SWEEP].offset, + sections[SECTION_SWEEP].offset); } // Read traces - if (sections.find(SECTION_TRACE) != sections.end()) { - m_traces = new TraceSection(this); - m_traces->deserialize(buf + sections[SECTION_TRACE].offset, sections[SECTION_TRACE].offset); + if (sections.find(SECTION_TRACE) != sections.end()) { + m_traces = new TraceSection(this); + m_traces->deserialize(buf + sections[SECTION_TRACE].offset, + sections[SECTION_TRACE].offset); } // Read values - if (sections.find(SECTION_VALUE) != sections.end()) { - if(m_sweeps != NULL) { - m_sweepvalues = new ValueSectionSweep(this); - 
m_sweepvalues->deserialize(buf + sections[SECTION_VALUE].offset, sections[SECTION_VALUE].offset); - } else { - m_nonsweepvalues = new ValueSectionNonSweep(this); - m_nonsweepvalues->deserialize(buf + sections[SECTION_VALUE].offset, sections[SECTION_VALUE].offset); - } + if (sections.find(SECTION_VALUE) != sections.end()) { + if (m_sweeps != NULL) { + m_sweepvalues = new ValueSectionSweep(this); + m_sweepvalues->deserialize(buf + sections[SECTION_VALUE].offset, + sections[SECTION_VALUE].offset); + } else { + m_nonsweepvalues = new ValueSectionNonSweep(this); + m_nonsweepvalues->deserialize(buf + sections[SECTION_VALUE].offset, + sections[SECTION_VALUE].offset); + } } - } void PSFFile::open() { @@ -104,11 +167,8 @@ void PSFFile::open() { m_size = lseek(m_fd, 0, SEEK_END); m_buffer = (char *)mmap(0, m_size, PROT_READ, MAP_SHARED, m_fd, 0); - - if(validate()) + deserialize((const char *)m_buffer, m_size); - else - throw InvalidFileError(); } void PSFFile::close() { @@ -124,7 +184,7 @@ void PSFFile::close() { } } -bool PSFFile::validate() const { +bool PSFFile::is_done() const { std::ifstream fstr(m_filename.c_str()); fstr.seekg(-12, std::ios::end); @@ -135,8 +195,7 @@ bool PSFFile::validate() const { clarissa[8]=0; return !strcmp(clarissa, "Clarissa"); -} - +} NameList PSFFile::get_param_names() const { if (m_sweeps != NULL) diff --git a/src/psfinternal.h b/src/psfinternal.h index aac3fd1..83943b5 100644 --- a/src/psfinternal.h +++ b/src/psfinternal.h @@ -64,6 +64,7 @@ typedef std::vector Filter; typedef std::vector NameList; typedef std::vector SweepValueList; typedef std::map PropertyMap; +typedef std::map SectionMap; #ifdef HAVE_TR1_UNORDERED_MAP typedef std::tr1::unordered_map TraceIDOffsetMap; typedef std::tr1::unordered_map NameIndexMap; @@ -623,10 +624,13 @@ class PSFFile { void close(); bool validate() const; + bool is_done() const; std::string m_filename; private: + SectionMap load_sections(const char *buf, int size); + SectionMap load_table_of_contents(const 
char *buf, int size); void deserialize(const char *buf, int size); int m_fd; diff --git a/src/psfreader.cc b/src/psfreader.cc deleted file mode 100644 index eeb24fe..0000000 --- a/src/psfreader.cc +++ /dev/null @@ -1,48 +0,0 @@ -#include "psf.h" -#include "psfdata.h" -#include "psfinternal.h" - -#include - - -Chunk * ValueSectionNonSweep::child_factory(int chunktype) { - if(NonSweepValue::ischunk(chunktype)) - return new NonSweepValue(psf); - else { - std::cerr << "Unexpected chunktype: " << chunktype << std::endl; - throw IncorrectChunk(chunktype); - } -} - -PSFData* ValueSectionNonSweep::get_value(std::string name) { - return dynamic_cast(get_child(name)).get_value(); -} - -int NonSweepValue::deserialize(const char *buf) { - const char *startbuf = buf; - - buf += Chunk::deserialize(buf); - - buf += id.deserialize(buf); - buf += name.deserialize(buf); - buf += valuetypeid.deserialize(buf); - - value = psf->types->get_typedef(valuetypeid.value).get_data_object(); - - buf += value->deserialize(buf); - - // Read optional properties - while(true) { - int chunktype = GET_INT32(buf); - - if(Property::ischunk(chunktype)) { - Property prop; - buf += prop.deserialize(buf); - properties.push_back(prop); - } else - break; - } - - return buf - startbuf; -} - diff --git a/src/psfsweepvalue.cc b/src/psfsweepvalue.cc index 4c48d12..b5cd65a 100644 --- a/src/psfsweepvalue.cc +++ b/src/psfsweepvalue.cc @@ -186,37 +186,54 @@ int SweepValueWindowed::deserialize(const char *buf, int *totaln, int windowoffs } for(int i=0; i < *totaln; ) { - buf += Chunk::deserialize(buf); - - int tmp = GET_INT32(buf); - int windowleft = tmp >> 16; - int n = tmp & 0xffff; // Number of data points in window - - buf += 4; - windowoffset += 4; - - // Deserialize parameter values from file to parameter vector (m_paramvalues) - int pwinstart = m_paramvalues->size(); - m_paramvalues->resize(m_paramvalues->size() + n); - for(int j=0; j < n; j++) - buf += 
paramtype.deserialize_data(m_paramvalues->ptr_at(pwinstart + j), buf); - - const char *valuebuf = buf; // Save start of trace values pointer in buffer - const_iterator idatavec = begin(); // Init iterator of destination trace vectors - for(Filter::const_iterator j=filter.begin(); j != filter.end(); j++, idatavec++) { - const DataTypeRef &typeref = dynamic_cast(**j); - - // calculate buffer pointer - buf = valuebuf + psf->get_value_section_sweep().get_valueoffset((*j)->get_id()) + - (windowsize - n * typeref.datasize()); - - for(int k=0; k < n; k++) - buf += typeref.deserialize_data((*idatavec)->ptr_at(i+k), buf); - } - - // Advance buffer pointer to end of trace values - buf = valuebuf + ntraces * windowsize; - i += n; + int testtype; + // some files have extra 31's between windows so discard them, + // TODO look into 31 padding + while (true) { + testtype = GET_INT32(buf); + if (testtype == 31) { + buf += 4; + } else { + break; + } + } + if (testtype == SweepValue::type) { + buf += 4; + } else if (testtype == ZeroPad::type) { + ZeroPad pad; + buf += pad.deserialize(buf); + buf += 4; + } + + int tmp = GET_INT32(buf); + int windowleft = tmp >> 16; + int n = tmp & 0xffff; // Number of data points in window + + buf += 4; + windowoffset += 4; + + // Deserialize parameter values from file to parameter vector (m_paramvalues) + int pwinstart = m_paramvalues->size(); + m_paramvalues->resize(m_paramvalues->size() + n); + for (int j = 0; j < n; j++) + buf += paramtype.deserialize_data(m_paramvalues->ptr_at(pwinstart + j), buf); + + const char *valuebuf = buf; // Save start of trace values pointer in buffer + const_iterator idatavec = begin(); // Init iterator of destination trace vectors + for (Filter::const_iterator j = filter.begin(); j != filter.end(); j++, idatavec++) { + const DataTypeRef &typeref = dynamic_cast(**j); + + // calculate buffer pointer + buf = valuebuf + psf->get_value_section_sweep().get_valueoffset((*j)->get_id()) + + (windowsize - n * 
typeref.datasize()); + + for (int k = 0; k < n; k++) + buf += typeref.deserialize_data((*idatavec)->ptr_at(i + k), buf); + } + + // Advance buffer pointer to end of trace values + buf = valuebuf + ntraces * windowsize; + i += n; } return buf - startbuf; } diff --git a/src/psftestraw.cc b/src/psftestraw.cc deleted file mode 100644 index 74b10d7..0000000 --- a/src/psftestraw.cc +++ /dev/null @@ -1,29 +0,0 @@ -#include "psf.h" - -#include - -int main() { - PSFFile psf("/nfs/home/henrik/spectre/1/pnoise.raw/pnoise_pout3g.pnoise"); - psf.open(); - - // NameList names = psf.traces->get_names(); - // BOOST_FOREACH(std::string name, names) { - // std::cout << name << " "; - // } - - ChildList filter; - filter.push_back(psf.traces->get_child(96226)); - - SweepValueList result; - - result = ((ValueSectionSweep *)psf.values)->get_values(*psf.traces); - //result = ((ValueSectionSweep *)psf.values)->get_values(filter); - - std::cout << result.size() << std::endl; - - Chunk *trace = psf.traces->get_child("tx_iqfilter_stop_0.tx_iqfilter_top_0.tx_iqfilter_bias_0.iprobe_gnd"); - - std::cout << trace->get_name() << std::endl; - - psf.close(); -} diff --git a/test/CMakeLists.txt b/test/CMakeLists.txt new file mode 100644 index 0000000..cacb360 --- /dev/null +++ b/test/CMakeLists.txt @@ -0,0 +1,25 @@ +add_executable(psftestraw psftestraw.cc) +target_include_directories(psftestraw PRIVATE $) +target_link_libraries(psftestraw psf) + +# https://github.com/dlaperriere/cmake_cppunit/blob/master/cppunit/cmake_modules/FindCPPUNIT.cmake +if(CMAKE_PROJECT_NAME STREQUAL PROJECT_NAME AND BUILD_TESTING) + + FIND_PATH(CPPUNIT_INCLUDE_DIR cppunit/TestCase.h PATHS ${CMAKE_INSTALL_INCLUDEDIR}) + + if( CPPUNIT_INCLUDE_DIR ) + MESSAGE( STATUS "cppunit found at: ${CPPUNIT_INCLUDE_DIR}") + FIND_LIBRARY(CPPUNIT_LIBRARY cppunit ${CPPUNIT_INCLUDE_DIR}/../lib) + + add_executable(test_psfdataset test_psfdataset.cc) + target_link_libraries( test_psfdataset psf ${CPPUNIT_LIBRARY} ${CMAKE_DL_LIBS} ) + 
target_include_directories(test_psfdataset PRIVATE $) + target_include_directories(test_psfdataset PRIVATE ${CPPUNIT_INCLUDE_DIR}) + + add_test(NAME psfdataset COMMAND test_psfdataset WORKING_DIRECTORY "${CMAKE_CURRENT_SOURCE_DIR}") + + else( CPPUNIT_INCLUDE_DIR ) + MESSAGE( WARNING "cppunit not found: skipping unit tests") + endif() + +endif() \ No newline at end of file diff --git a/test/Makefile.am b/test/Makefile.am deleted file mode 100644 index df8d331..0000000 --- a/test/Makefile.am +++ /dev/null @@ -1,7 +0,0 @@ -bin_PROGRAMS = test_psfdataset -test_psfdataset_SOURCES = test_psfdataset.cc -test_psfdataset_CXXFLAGS = -I../include ${BOOST_CPPFLAGS} -test_psfdataset_LDFLAGS = -L../src -lpsf -test_psfdataset_LDFLAGS += -lcppunit -ldl - - diff --git a/test/data/tran.tran b/test/data/tran.tran new file mode 100644 index 0000000..55a7a6e Binary files /dev/null and b/test/data/tran.tran differ diff --git a/test/psftestraw.cc b/test/psftestraw.cc new file mode 100644 index 0000000..f03e5f4 --- /dev/null +++ b/test/psftestraw.cc @@ -0,0 +1,52 @@ +#include + +#include "psf.h" + +int main(int argc, char *argv[]) { + char *filename = argv[1]; + + try { + PSFDataSet data(filename); + data.open(); + + for (auto prop : data.get_header_properties()) { + std::cout << prop.first << ": \t" << *prop.second << std::endl; + } + + for (auto prop : data.get_signal_names()) { + std::cout << prop << std::endl; + } + + std::vector signal_names; + if (argc < 3) { + signal_names.push_back("vin"); + } else { + for (int i = 2; i < argc; i++) { + signal_names.push_back(argv[i]); + } + } + + // find file name + std::string f_name = std::string(filename); + + size_t sep_pos = f_name.rfind('/', f_name.length()); + if (sep_pos != std::string::npos) { + f_name = f_name.substr(sep_pos + 1, f_name.length() - sep_pos); + } + + for (auto signal_name : signal_names) { + if (f_name.find("dc") != std::string::npos) { + std::cout << signal_name << " = " << data.get_signal_scalar(signal_name) << 
std::endl; + } else { + PSFDoubleVector *signal = (PSFDoubleVector *)data.get_signal_vector(signal_name); + std::cout << signal_name << " Number of time points = " << signal->size() << std::endl; + for (auto i = signal->begin(); i != signal->end(); ++i) + std::cout << *i << ' '; + std::cout << std::endl; + } + } + data.close(); + } catch (const std::exception &exc) { + std::cerr << "Exception caught " << exc.what() << "\n"; + } +} diff --git a/test/test_psfdataset.cc b/test/test_psfdataset.cc index 1fd0f4c..82cad90 100644 --- a/test/test_psfdataset.cc +++ b/test/test_psfdataset.cc @@ -71,12 +71,12 @@ class TestPSFDataSet : public CPPUNIT_NS::TestCase { void test_open_psfascii(); private: - std::auto_ptr m_dcop_ds, m_tran_ds; + std::unique_ptr m_dcop_ds, m_tran_ds; }; void TestPSFDataSet::setUp() { - m_dcop_ds = std::auto_ptr(new PSFDataSet("data/dcOp.dc")); - m_tran_ds = std::auto_ptr(new PSFDataSet("data/tran.tran")); + m_dcop_ds = std::unique_ptr(new PSFDataSet("data/dcOp.dc")); + m_tran_ds = std::unique_ptr(new PSFDataSet("data/tran.tran")); } // DCOP data set tests @@ -123,7 +123,7 @@ void TestPSFDataSet::test_tran_get_nsweeps() { void TestPSFDataSet::test_tran_get_sweep_npoints() { // test tran - CPPUNIT_ASSERT_EQUAL(m_tran_ds->get_sweep_npoints(), 24942); + CPPUNIT_ASSERT_EQUAL(m_tran_ds->get_sweep_npoints(), 76); } void TestPSFDataSet::test_tran_get_sweep_values() { @@ -131,8 +131,8 @@ void TestPSFDataSet::test_tran_get_sweep_values() { stringvector_t names = m_tran_ds->get_signal_names(); // Get signal vectors - for(stringvector_iter_t name_iter = names.begin(); name_iter != names.end(); name_iter++) { - const PSFVector* datavector = m_tran_ds->get_signal_vector("in"); + for(auto name: names) { + const PSFVector* datavector = m_tran_ds->get_signal_vector(name); delete(datavector); } }