From 5eb1e8d970f895c4380640ea0b5d376705ddc7c7 Mon Sep 17 00:00:00 2001 From: Hazen Babcock Date: Thu, 27 Oct 2016 17:20:59 -0400 Subject: [PATCH 01/17] Make tracker into a C library. --- storm_analysis/sa_utilities/std_analysis.py | 3 +- storm_analysis/sa_utilities/tracker.c | 32 +++++++++---------- storm_analysis/sa_utilities/tracker_c.py | 34 +++++++++++++++++++++ 3 files changed, 51 insertions(+), 18 deletions(-) create mode 100644 storm_analysis/sa_utilities/tracker_c.py diff --git a/storm_analysis/sa_utilities/std_analysis.py b/storm_analysis/sa_utilities/std_analysis.py index ad126e4a..f9a03783 100644 --- a/storm_analysis/sa_utilities/std_analysis.py +++ b/storm_analysis/sa_utilities/std_analysis.py @@ -195,7 +195,8 @@ def standardAnalysis(find_peaks, data_file, mlist_file, parameters): # Does the frame-to-frame tracking. def tracking(mol_list_filename, parameters): [min_z, max_z] = params.getZRange(parameters) - proc_params = [src_dir + "tracker", + proc_params = ["python", + src_dir + "tracker_c", mol_list_filename, parameters.descriptor, str(parameters.radius), diff --git a/storm_analysis/sa_utilities/tracker.c b/storm_analysis/sa_utilities/tracker.c index 20222bec..3b790fdd 100644 --- a/storm_analysis/sa_utilities/tracker.c +++ b/storm_analysis/sa_utilities/tracker.c @@ -13,16 +13,12 @@ * * 01/14 * + * Changed into a C library. 
* - * Hazen - * - * Compilation instructions: + * 10/16 * - * Linux: - * gcc tracker.c -o tracker -lm + * Hazen * - * Windows: - * gcc tracker.c -o tracker */ @@ -43,7 +39,7 @@ struct track_elt* createTrackObject(float *, int, int); int addObject(float *, float, int, int, int); void freeTrack(struct track_elt *); void cullTracks(FILE *, int, int); - +int tracker(int, const char **); /* Structures */ struct track_elt @@ -138,7 +134,7 @@ struct track_elt* createTrackObject(float *object_data, int track_id, int molecu int addObject(float *object_data, float r_sqr_max, int track_id, int molecule_id, int frame_no) { - int i, found = 0, *object_data_int; + int found = 0, *object_data_int; float dx, dy, weight; struct track_elt *cur, *last, *new_track, *new_object; @@ -268,7 +264,7 @@ void freeTrack(struct track_elt *start) void cullTracks(FILE *mlist, int cull_frame, int save_track_ids) { - int i, culled, *object_data_int, first_cat; + int culled, *object_data_int, first_cat; float *object_data; struct track_elt *cur, *last, *to_save; @@ -337,7 +333,7 @@ void cullTracks(FILE *mlist, int cull_frame, int save_track_ids) /* - * Main + * tracker * * Descriptor is a string of the form "02110311" that * describes the different frames. @@ -348,14 +344,14 @@ void cullTracks(FILE *mlist, int cull_frame, int save_track_ids) * ... 
* */ - -int main(int argc, const char *argv[]) +int tracker(int argc, const char *argv[]) { - char version[5],tmp[2]; + char tmp[2]; int i, cur_frame, track_number, last_frame; - int molecules, temp, desc_len, cur_desc, save_track_ids; + int molecules, desc_len, cur_desc, save_track_ids; int *object_data_int, *descriptor; float max_radius, zmin, zmax, object_data[OBJECT_DATA_SIZE]; + size_t n_read; FILE *mlist; if (argc < 6){ @@ -376,7 +372,7 @@ int main(int argc, const char *argv[]) } fseek(mlist, MOLECULES, SEEK_SET); - fread(&molecules, sizeof(int), 1, mlist); + n_read = fread(&molecules, sizeof(int), 1, mlist); printf("Molecules: %d (%s)\n", molecules, argv[1]); printf("Descriptor: %s\n", argv[2]); @@ -413,7 +409,7 @@ int main(int argc, const char *argv[]) //printf(" (%f, %f, %f)\n", object_data[X], object_data[Y], object_data[Z]); } fseeko64(mlist, DATA + OBJECT_DATA_SIZE*DATUM_SIZE*(long long)i, SEEK_SET); - fread(&object_data, sizeof(float), OBJECT_DATA_SIZE, mlist); + n_read = fread(&object_data, sizeof(float), OBJECT_DATA_SIZE, mlist); cur_frame = object_data_int[FRAME]; cur_desc = descriptor[(cur_frame-1)%desc_len]; @@ -454,6 +450,8 @@ int main(int argc, const char *argv[]) printf("Found %d tracks\n", track_number); fclose(mlist); + + return 0; } diff --git a/storm_analysis/sa_utilities/tracker_c.py b/storm_analysis/sa_utilities/tracker_c.py new file mode 100644 index 00000000..2bbb037e --- /dev/null +++ b/storm_analysis/sa_utilities/tracker_c.py @@ -0,0 +1,34 @@ +#!/usr/bin/env python +# +# Python interface to the C tracker library. Note that this +# library uses static variables so it is not thread safe. 
+# +# Hazen 10/16 +# + +import ctypes +import os + +import storm_analysis.sa_library.loadclib as loadclib + +ctracker = loadclib.loadCLibrary(os.path.dirname(__file__), "tracker") + +ctracker.tracker.argtypes = [ctypes.c_int, + ctypes.c_void_p] + +def tracker(mlist_filename, descriptor, radius, zmin, zmax, save_track_id = 0): + argc = 7 + argv = (ctypes.c_char_p * argc)() + argv[:] = ["tracker", + mlist_filename, + descriptor, + str(radius), + str(zmin), + str(zmax), + str(save_track_id)] + ctracker.tracker(argc, argv) + +if (__name__ == "__main__"): + import sys + + tracker(*sys.argv[1:]) From a36a89b99287997af736044ffd3e5d2055c8981f Mon Sep 17 00:00:00 2001 From: Hazen Babcock Date: Thu, 27 Oct 2016 17:58:01 -0400 Subject: [PATCH 02/17] Replace standalone C programs with C libraries that are called from Python. --- .../sa_utilities/apply-drift-correction.c | 23 ++++++------ .../sa_utilities/apply_drift_correction_c.py | 30 +++++++++++++++ storm_analysis/sa_utilities/avemlist.c | 20 ++++++---- storm_analysis/sa_utilities/avemlist_c.py | 29 +++++++++++++++ storm_analysis/sa_utilities/compile_linux.sh | 20 ++++++++-- storm_analysis/sa_utilities/fitz.c | 28 ++++++++------ storm_analysis/sa_utilities/fitz_c.py | 37 +++++++++++++++++++ storm_analysis/sa_utilities/std_analysis.py | 11 ++++-- storm_analysis/sa_utilities/tracker.c | 1 - storm_analysis/sa_utilities/tracker_c.py | 8 ++-- 10 files changed, 163 insertions(+), 44 deletions(-) create mode 100644 storm_analysis/sa_utilities/apply_drift_correction_c.py create mode 100644 storm_analysis/sa_utilities/avemlist_c.py create mode 100644 storm_analysis/sa_utilities/fitz_c.py diff --git a/storm_analysis/sa_utilities/apply-drift-correction.c b/storm_analysis/sa_utilities/apply-drift-correction.c index f9ce2de9..234ba771 100644 --- a/storm_analysis/sa_utilities/apply-drift-correction.c +++ b/storm_analysis/sa_utilities/apply-drift-correction.c @@ -2,16 +2,13 @@ * Applies drift correction to a molecule list, * works in 
place. * - * Hazen * 12/11 * - * Compilation instructions: + * Changed into a C library. * - * Linux: - * gcc apply-drift-correction.c -o apply-drift-correction + * 10/16 * - * Windows: - * gcc apply-drift-correction.c -o apply-drift-correction + * Hazen */ @@ -23,21 +20,23 @@ #include "insight.h" +int applyDriftCorrection(int, const char **); + /* - * Main + * applyDriftCorrection * * mlist - the molecule list file * drift - the drift correction file (in i3 standard format). * */ - -int main(int argc, const char *argv[]) +int applyDriftCorrection(int argc, const char *argv[]) { int i,cur_frame,frames,molecules,temp; int *object_data_int; char str[100]; float *dx,*dy,*dz; float object_data[OBJECT_DATA_SIZE]; + size_t n_read; FILE *mlist_fp,*drift_fp; if (argc != 3){ @@ -55,7 +54,7 @@ int main(int argc, const char *argv[]) // Figure out how many molecules there are to process. mlist_fp = fopen(argv[1], "rb+"); fseek(mlist_fp, MOLECULES, SEEK_SET); - fread(&molecules, sizeof(int), 1, mlist_fp); + n_read = fread(&molecules, sizeof(int), 1, mlist_fp); printf(" Molecules: %d\n", molecules); // Determine size of drift correction file & load into memory. 
@@ -92,7 +91,7 @@ int main(int argc, const char *argv[]) printf(" Processing molecule %d in frame %d (apply-drift-correction)\n", i, cur_frame); } fseeko64(mlist_fp, DATA + OBJECT_DATA_SIZE*DATUM_SIZE*(long long)i, SEEK_SET); - fread(&object_data, sizeof(float), OBJECT_DATA_SIZE, mlist_fp); + n_read = fread(&object_data, sizeof(float), OBJECT_DATA_SIZE, mlist_fp); cur_frame = object_data_int[FRAME]-1; // range checking @@ -117,6 +116,8 @@ int main(int argc, const char *argv[]) free(dy); free(dz); fclose(mlist_fp); + + return 0; } diff --git a/storm_analysis/sa_utilities/apply_drift_correction_c.py b/storm_analysis/sa_utilities/apply_drift_correction_c.py new file mode 100644 index 00000000..1e35a3c3 --- /dev/null +++ b/storm_analysis/sa_utilities/apply_drift_correction_c.py @@ -0,0 +1,30 @@ +#!/usr/bin/env python +# +# Python interface to the C apply_drift_correction library. Note that this +# library uses static variables so it is not thread safe. +# +# Hazen 10/16 +# + +import ctypes +import os + +import storm_analysis.sa_library.loadclib as loadclib + +adc = loadclib.loadCLibrary(os.path.dirname(__file__), "apply-drift-correction") + +adc.applyDriftCorrection.argtypes = [ctypes.c_int, + ctypes.c_void_p] + +def applyDriftCorrection(mlist_filename, drift_filename): + argc = 3 + argv = (ctypes.c_char_p * argc)() + argv[:] = ["apply-drift-correction", + mlist_filename, + drift_filename] + adc.applyDriftCorrection(argc, argv) + +if (__name__ == "__main__"): + import sys + + applyDriftCorrection(*sys.argv[1:]) diff --git a/storm_analysis/sa_utilities/avemlist.c b/storm_analysis/sa_utilities/avemlist.c index c012da9a..ee04b25c 100644 --- a/storm_analysis/sa_utilities/avemlist.c +++ b/storm_analysis/sa_utilities/avemlist.c @@ -29,7 +29,7 @@ /* Functions */ void averageTrack(FILE *, FILE *, int, int); - +int avemlist(int, const char **); /* These are as in the insight.h file */ static int average_flag[] = {AVERAGE, /* XO */ @@ -61,6 +61,7 @@ void averageTrack(FILE 
*input_mlist, FILE *output_mlist, int molecule, int visit { int i,*object_data_int,elements,track_id; float weight, total_weight; + size_t n_read; float average_data[OBJECT_DATA_SIZE], object_data[OBJECT_DATA_SIZE]; elements = 1; @@ -68,7 +69,7 @@ void averageTrack(FILE *input_mlist, FILE *output_mlist, int molecule, int visit // load object data fseeko64(input_mlist, DATA + OBJECT_DATA_SIZE*DATUM_SIZE*(long long)molecule, SEEK_SET); - fread(&object_data, sizeof(float), OBJECT_DATA_SIZE, input_mlist); + n_read = fread(&object_data, sizeof(float), OBJECT_DATA_SIZE, input_mlist); for(i=0;i<(OBJECT_DATA_SIZE);i++){ average_data[i] = object_data[i]; } @@ -94,7 +95,7 @@ void averageTrack(FILE *input_mlist, FILE *output_mlist, int molecule, int visit molecule = object_data_int[LINK]; // printf(" %d\n", molecule); fseeko64(input_mlist, DATA + OBJECT_DATA_SIZE*DATUM_SIZE*(long long)molecule, SEEK_SET); - fread(&object_data, sizeof(float), OBJECT_DATA_SIZE, input_mlist); + n_read = fread(&object_data, sizeof(float), OBJECT_DATA_SIZE, input_mlist); if (TESTING){ if(track_id != object_data_int[FITI]){ @@ -142,11 +143,12 @@ void averageTrack(FILE *input_mlist, FILE *output_mlist, int molecule, int visit * */ -int main(int argc, const char *argv[]) +int avemlist(int argc, const char *argv[]) { int i, last_frame, molecules, tracks, unvisited; char header[DATA]; int object_data[OBJECT_DATA_SIZE]; + size_t n_read; FILE *input_mlist, *output_mlist; if (argc != 3){ @@ -171,15 +173,15 @@ int main(int argc, const char *argv[]) exit(0); } - fread(&header, sizeof(char), DATA, input_mlist); + n_read = fread(&header, sizeof(char), DATA, input_mlist); fwrite(&header, sizeof(char), DATA, output_mlist); fseek(input_mlist, MOLECULES, SEEK_SET); - fread(&molecules, sizeof(int), 1, input_mlist); + n_read = fread(&molecules, sizeof(int), 1, input_mlist); // printf("Molecules: %d\n", molecules); fseek(input_mlist, DATA, SEEK_SET); - fread(&object_data, sizeof(int), OBJECT_DATA_SIZE, input_mlist); 
+ n_read = fread(&object_data, sizeof(int), OBJECT_DATA_SIZE, input_mlist); unvisited = object_data[VISITED]; // printf("Unvisited: %d\n", unvisited); @@ -193,7 +195,7 @@ int main(int argc, const char *argv[]) printf("Processing molecule %d (avemlist)\n", i); } fseeko64(input_mlist, DATA + OBJECT_DATA_SIZE*DATUM_SIZE*(long long)i, SEEK_SET); - fread(&object_data, sizeof(int), OBJECT_DATA_SIZE, input_mlist); + n_read = fread(&object_data, sizeof(int), OBJECT_DATA_SIZE, input_mlist); if (last_frame != object_data[FRAME]){ fflush(input_mlist); last_frame = object_data[FRAME]; @@ -217,6 +219,8 @@ int main(int argc, const char *argv[]) fclose(input_mlist); fclose(output_mlist); + + return 0; } diff --git a/storm_analysis/sa_utilities/avemlist_c.py b/storm_analysis/sa_utilities/avemlist_c.py new file mode 100644 index 00000000..f1f7e63d --- /dev/null +++ b/storm_analysis/sa_utilities/avemlist_c.py @@ -0,0 +1,29 @@ +#!/usr/bin/env python +# +# Python interface to the C avemlist library. +# +# Hazen 10/16 +# + +import ctypes +import os + +import storm_analysis.sa_library.loadclib as loadclib + +c_avemlist = loadclib.loadCLibrary(os.path.dirname(__file__), "avemlist") + +c_avemlist.avemlist.argtypes = [ctypes.c_int, + ctypes.c_void_p] + +def avemlist(input_filename, output_filename): + argc = 3 + argv = (ctypes.c_char_p * argc)() + argv[:] = ["avemlist", + input_filename, + output_filename] + c_avemlist.avemlist(argc, argv) + +if (__name__ == "__main__"): + import sys + + avemlist(*sys.argv[1:]) diff --git a/storm_analysis/sa_utilities/compile_linux.sh b/storm_analysis/sa_utilities/compile_linux.sh index 3ba4e3c1..5c590dab 100644 --- a/storm_analysis/sa_utilities/compile_linux.sh +++ b/storm_analysis/sa_utilities/compile_linux.sh @@ -1,6 +1,18 @@ #!/bin/bash -gcc tracker.c -o tracker -lc -lm -gcc apply-drift-correction.c -o apply-drift-correction -lc -gcc fitz.c -o fitz -lc -lm -gcc avemlist.c -o avemlist -lc -lm +gcc -fPIC -g -c -Wall -O3 tracker.c +gcc -shared 
-Wl,-soname,tracker.so.1 -o tracker.so.1.0.1 tracker.o +ln -s tracker.so.1.0.1 tracker.so + +gcc -fPIC -g -c -Wall -O3 apply-drift-correction.c +gcc -shared -Wl,-soname,apply-drift-correction.so.1 -o apply-drift-correction.so.1.0.1 apply-drift-correction.o +ln -s apply-drift-correction.so.1.0.1 apply-drift-correction.so + +gcc -fPIC -g -c -Wall -O3 fitz.c +gcc -shared -Wl,-soname,fitz.so.1 -o fitz.so.1.0.1 fitz.o +ln -s fitz.so.1.0.1 fitz.so + +gcc -fPIC -g -c -Wall -O3 avemlist.c +gcc -shared -Wl,-soname,avemlist.so.1 -o avemlist.so.1.0.1 avemlist.o +ln -s avemlist.so.1.0.1 avemlist.so + diff --git a/storm_analysis/sa_utilities/fitz.c b/storm_analysis/sa_utilities/fitz.c index dc38226e..dc642dce 100644 --- a/storm_analysis/sa_utilities/fitz.c +++ b/storm_analysis/sa_utilities/fitz.c @@ -1,17 +1,14 @@ /* - * 07/11 - * * Performs z fit based on wx, wy on a Insight3 file. * Works "in place". * + * 07/11 * - * Hazen - * - * Compilation instructions: + * Changed into a C library. * - * Linux: - * gcc fitz.c -o fitz -lm + * 10/16 * + * Hazen */ @@ -29,6 +26,10 @@ double *wx_curve; double *wy_curve; +void initWxWy(double *, double *); +float findBestZ(double, double, double); +int fitz(int, const char **); + /* * Initialize wx, wy pre-calculated array curves. @@ -96,7 +97,7 @@ float findBestZ(double wx, double wy, double cutoff) /* - * Main + * fitz * * i3_file - insight3 file on which to perform z calculations. * cut_off - distance cutoff @@ -106,7 +107,7 @@ float findBestZ(double wx, double wy, double cutoff) * Expects calibration curves & molecule widths to be in nm. 
*/ -int main(int argc, const char *argv[]) +int fitz(int argc, const char *argv[]) { int i,bad_cat,molecules,offset; float w,a,z; @@ -114,6 +115,7 @@ int main(int argc, const char *argv[]) double wx,wy; double wx_params[7]; double wy_params[7]; + size_t n_read; FILE *mlist; if (argc == 1){ @@ -137,7 +139,7 @@ int main(int argc, const char *argv[]) } fseek(mlist, MOLECULES, SEEK_SET); - fread(&molecules, sizeof(int), 1, mlist); + n_read = fread(&molecules, sizeof(int), 1, mlist); printf("Molecules: %d\n", molecules); initWxWy(wx_params, wy_params); @@ -151,10 +153,10 @@ int main(int argc, const char *argv[]) offset = DATA + i*OBJECT_DATA_SIZE*DATUM_SIZE; fseeko64(mlist, offset+WIDTH*DATUM_SIZE, SEEK_SET); - fread(&w, sizeof(float), 1, mlist); + n_read = fread(&w, sizeof(float), 1, mlist); fseeko64(mlist, offset+ASPECT*DATUM_SIZE, SEEK_SET); - fread(&a, sizeof(float), 1, mlist); + n_read = fread(&a, sizeof(float), 1, mlist); wx = sqrt(sqrt(w*w/a)); wy = sqrt(sqrt(w*w*a)); @@ -181,6 +183,8 @@ int main(int argc, const char *argv[]) free(wx_curve); free(wy_curve); fclose(mlist); + + return 0; } diff --git a/storm_analysis/sa_utilities/fitz_c.py b/storm_analysis/sa_utilities/fitz_c.py new file mode 100644 index 00000000..36233b67 --- /dev/null +++ b/storm_analysis/sa_utilities/fitz_c.py @@ -0,0 +1,37 @@ +#!/usr/bin/env python +# +# Python interface to the C fitz library. Note that this +# library uses static variables so it is not thread safe. 
+# +# Hazen 10/16 +# + +import ctypes +import os + +import storm_analysis.sa_library.loadclib as loadclib + +c_fitz = loadclib.loadCLibrary(os.path.dirname(__file__), "fitz") + +c_fitz.fitz.argtypes = [ctypes.c_int, + ctypes.c_void_p] + +def fitz(i3_filename, cut_off, wx_params, wy_params): + argc = 17 + argv = (ctypes.c_char_p * argc)() + argv[0] = "fitz" + argv[1] = i3_filename + argv[2] = str(cut_off) + for i in range(7): + argv[i+3] = str(wx_params[i]) + argv[i+10] = str(wy_params[i]) + c_fitz.fitz(argc, argv) + +if (__name__ == "__main__"): + import sys + + if(len(sys.argv) != 17): + print("fitz_c.py requires 16 arguments") + exit() + + fitz(sys.argv[1], sys.argv[2], sys.argv[3:10], sys.argv[10:17]) diff --git a/storm_analysis/sa_utilities/std_analysis.py b/storm_analysis/sa_utilities/std_analysis.py index f9a03783..dc9d2be5 100644 --- a/storm_analysis/sa_utilities/std_analysis.py +++ b/storm_analysis/sa_utilities/std_analysis.py @@ -21,7 +21,8 @@ # Averages all the molecules in a track into a single molecule. 
def averaging(mol_list_filename, ave_list_filename): - proc_params = [src_dir + "avemlist", + proc_params = ["python", + src_dir + "avemlist_c.py", mol_list_filename, ave_list_filename] subprocess.call(proc_params) @@ -56,7 +57,8 @@ def driftCorrection(list_files, parameters): if (os.path.exists(drift_name)): for list_file in list_files: - proc_params = [src_dir + "apply-drift-correction", + proc_params = ["python", + src_dir + "apply_drift_correction_c.py", list_file, drift_name] subprocess.call(proc_params) @@ -196,7 +198,7 @@ def standardAnalysis(find_peaks, data_file, mlist_file, parameters): def tracking(mol_list_filename, parameters): [min_z, max_z] = params.getZRange(parameters) proc_params = ["python", - src_dir + "tracker_c", + src_dir + "tracker_c.py", mol_list_filename, parameters.descriptor, str(parameters.radius), @@ -213,7 +215,8 @@ def zFitting(mol_list_filename, parameters): else: wx_str = list(map(str, params.getWidthParams(parameters, "x"))) wy_str = list(map(str, params.getWidthParams(parameters, "y"))) - proc_params = [src_dir + "fitz", + proc_params = ["python", + src_dir + "fitz_c.py", mol_list_filename, str(parameters.cutoff)] + wx_str + wy_str subprocess.call(proc_params) diff --git a/storm_analysis/sa_utilities/tracker.c b/storm_analysis/sa_utilities/tracker.c index 3b790fdd..23648784 100644 --- a/storm_analysis/sa_utilities/tracker.c +++ b/storm_analysis/sa_utilities/tracker.c @@ -18,7 +18,6 @@ * 10/16 * * Hazen - * */ diff --git a/storm_analysis/sa_utilities/tracker_c.py b/storm_analysis/sa_utilities/tracker_c.py index 2bbb037e..cf3f28f2 100644 --- a/storm_analysis/sa_utilities/tracker_c.py +++ b/storm_analysis/sa_utilities/tracker_c.py @@ -11,10 +11,10 @@ import storm_analysis.sa_library.loadclib as loadclib -ctracker = loadclib.loadCLibrary(os.path.dirname(__file__), "tracker") +c_tracker = loadclib.loadCLibrary(os.path.dirname(__file__), "tracker") -ctracker.tracker.argtypes = [ctypes.c_int, - ctypes.c_void_p] 
+c_tracker.tracker.argtypes = [ctypes.c_int, + ctypes.c_void_p] def tracker(mlist_filename, descriptor, radius, zmin, zmax, save_track_id = 0): argc = 7 @@ -26,7 +26,7 @@ def tracker(mlist_filename, descriptor, radius, zmin, zmax, save_track_id = 0): str(zmin), str(zmax), str(save_track_id)] - ctracker.tracker(argc, argv) + c_tracker.tracker(argc, argv) if (__name__ == "__main__"): import sys From 2d7803b9bdae6d5f0d01e025138306565dcb2bc3 Mon Sep 17 00:00:00 2001 From: Hadrien Mary Date: Thu, 27 Oct 2016 10:00:53 -0400 Subject: [PATCH 03/17] Basic setup.py --- __init__.py | 1 - setup.py | 115 ++++++++++++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 115 insertions(+), 1 deletion(-) delete mode 100644 __init__.py create mode 100644 setup.py diff --git a/__init__.py b/__init__.py deleted file mode 100644 index 013e4b7e..00000000 --- a/__init__.py +++ /dev/null @@ -1 +0,0 @@ -#!/usr/bin/python diff --git a/setup.py b/setup.py new file mode 100644 index 00000000..c59c426a --- /dev/null +++ b/setup.py @@ -0,0 +1,115 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +from setuptools import setup, find_packages +from distutils.core import Extension + +import numpy + +version = "1.0" +description = "Read and write image data from and to TIFF files." 
+long_description = "" + +def get_c_extensions(): + extensions = [#Extension("", ["./storm_analysis/fista/fista_decon_utilities.c"], + # include_dirs=[], library_dirs=[], extra_link_args=[], extra_compile_args=[]), + #Extension("", ["./storm_analysis/fista/fista_fft.c"], + # include_dirs=[], library_dirs=[], extra_link_args=[], extra_compile_args=[]), + #Extension("", ["./storm_analysis/sa_library/matched_filter.c"], + # include_dirs=[], library_dirs=[], extra_link_args=[], extra_compile_args=[]), + #Extension("", ["./storm_analysis/sa_library/grid.c"], + # include_dirs=[], library_dirs=[], extra_link_args=[], extra_compile_args=[]), + Extension("", ["./storm_analysis/sa_library/multi_fit.c"], + include_dirs=[], library_dirs=[], extra_link_args=[], extra_compile_args=[]), + #Extension("", ["./storm_analysis/sa_library/ia_utilities.c"], + # include_dirs=[], library_dirs=[], extra_link_args=[], extra_compile_args=[]), + #Extension("", ["./storm_analysis/dbscan/dbscan.c"], + # include_dirs=[], library_dirs=[], extra_link_args=[], extra_compile_args=[]), + #Extension("", ["./storm_analysis/dbscan/kdtree.c"], + # include_dirs=[], library_dirs=[], extra_link_args=[], extra_compile_args=[]), + #Extension("", ["./storm_analysis/decon_storm/mlem_sparse.c"], + # include_dirs=[], library_dirs=[], extra_link_args=[], extra_compile_args=[]), + #Extension("", ["./storm_analysis/sCMOS/scmos_utilities.c"], + # include_dirs=[], library_dirs=[], extra_link_args=[], extra_compile_args=[]), + #Extension("", ["./storm_analysis/L1H/fista_lib.c"], + # include_dirs=[], library_dirs=[], extra_link_args=[], extra_compile_args=[]), + #Extension("", ["./storm_analysis/L1H/homotopy_storm.c"], + # include_dirs=[], library_dirs=[], extra_link_args=[], extra_compile_args=[]), + #Extension("", ["./storm_analysis/L1H/homotopy_sse.c"], + # include_dirs=[], library_dirs=[], extra_link_args=[], extra_compile_args=[]), + #Extension("", ["./storm_analysis/L1H/homotopy_general.c"], + # include_dirs=[], 
library_dirs=[], extra_link_args=[], extra_compile_args=[]), + #Extension("", ["./storm_analysis/L1H/homotopy_imagea.c"], + # include_dirs=[], library_dirs=[], extra_link_args=[], extra_compile_args=[]), + #Extension("", ["./storm_analysis/L1H/homotopy_common.c"], + # include_dirs=[], library_dirs=[], extra_link_args=[], extra_compile_args=[]), + #Extension("", ["./storm_analysis/L1H/homotopy_imagea_common.c"], + # include_dirs=[], library_dirs=[], extra_link_args=[], extra_compile_args=[]), + #Extension("", ["./storm_analysis/L1H/homotopy_gpu.c"], + # include_dirs=[], library_dirs=[], extra_link_args=[], extra_compile_args=[]), + #Extension("", ["./storm_analysis/sa_utilities/fitz.c"], + # include_dirs=[], library_dirs=[], extra_link_args=[], extra_compile_args=[]), + #Extension("", ["./storm_analysis/sa_utilities/tracker.c"], + # include_dirs=[], library_dirs=[], extra_link_args=[], extra_compile_args=[]), + #Extension("", ["./storm_analysis/sa_utilities/avemlist.c"], + # include_dirs=[], library_dirs=[], extra_link_args=[], extra_compile_args=[]), + #Extension("", ["./storm_analysis/sa_utilities/apply-drift-correction.c"], + # include_dirs=[], library_dirs=[], extra_link_args=[], extra_compile_args=[]), + #Extension("", ["./storm_analysis/frc/frc.c"], + # include_dirs=[], library_dirs=[], extra_link_args=[], extra_compile_args=[]), + #Extension("", ["./storm_analysis/simulator/draw_gaussians.c"], + # include_dirs=[], library_dirs=[], extra_link_args=[], extra_compile_args=[]), + #Extension("", ["./storm_analysis/simulator/zernike.c"], + # include_dirs=[], library_dirs=[], extra_link_args=[], extra_compile_args=[]), + #Extension("", ["./storm_analysis/spliner/cubic_spline.c"], + # include_dirs=[], library_dirs=[], extra_link_args=[], extra_compile_args=[]), + #Extension("", ["./storm_analysis/spliner/multi_fit_core.c"], + # include_dirs=[], library_dirs=[], extra_link_args=[], extra_compile_args=[]), + #Extension("", ["./storm_analysis/spliner/cubic_fit.c"], + # 
include_dirs=[], library_dirs=[], extra_link_args=[], extra_compile_args=[]), + #Extension("", ["./storm_analysis/rolling_ball_bgr/rolling_ball_lib.c"], + # include_dirs=[], library_dirs=[], extra_link_args=[], extra_compile_args=[]), + ] + + return extensions + +setup( + name='storm_analysis', + version=version, + description=description, + long_description=long_description, + author='Hazen Babcock', + author_email='hbabcock at fas.harvard.edu', + url='https://github.com/ZhuangLab/storm-analysis', + + zip_safe=False, + packages=find_packages(), + + ext_modules=get_c_extensions(), + package_data={ + #'sample': ['package_data.dat'], + # If any package contains *.txt or *.rst files, include them: + #'': ['*.txt', '*.rst'], + # And include any *.msg files found in the 'hello' package, too: + #'hello': ['*.msg'], + }, + exclude_package_data={ + #'': ['README.txt'] + }, + include_package_data=True, + + requires=['numpy (>=1.8.2)', 'setuptools'], + + license="", + keywords='storm,microscopy', + classifiers=[ + 'Development Status :: 4 - Beta', + 'Intended Audience :: Developers', + 'License :: OSI Approved :: BSD License', + "Programming Language :: C", + "Programming Language :: Python :: 2", + 'Programming Language :: Python :: 2.7', + 'Programming Language :: Python :: 3', + 'Programming Language :: Python :: 3.4', + ], +) \ No newline at end of file From f31e02bb02880c1b98288c889249e98a823f5485 Mon Sep 17 00:00:00 2001 From: Hadrien Mary Date: Thu, 27 Oct 2016 10:58:22 -0400 Subject: [PATCH 04/17] Add some mechanism to load C extension --- .gitignore | 2 + setup.py | 90 +++++++-------------- storm_analysis/sa_library/grid_c.py | 2 +- storm_analysis/sa_library/ia_utilities_c.py | 2 +- storm_analysis/sa_library/loadclib.py | 38 ++++++--- storm_analysis/sa_library/multi_fit_c.py | 2 +- 6 files changed, 65 insertions(+), 71 deletions(-) diff --git a/.gitignore b/.gitignore index baf65ba6..17aee38f 100644 --- a/.gitignore +++ b/.gitignore @@ -3,6 +3,8 @@ ############# 
*.py[co] +*.egg-info +build/ ############# ## Emacs diff --git a/setup.py b/setup.py index c59c426a..893b879b 100644 --- a/setup.py +++ b/setup.py @@ -11,67 +11,39 @@ long_description = "" def get_c_extensions(): - extensions = [#Extension("", ["./storm_analysis/fista/fista_decon_utilities.c"], - # include_dirs=[], library_dirs=[], extra_link_args=[], extra_compile_args=[]), - #Extension("", ["./storm_analysis/fista/fista_fft.c"], - # include_dirs=[], library_dirs=[], extra_link_args=[], extra_compile_args=[]), - #Extension("", ["./storm_analysis/sa_library/matched_filter.c"], - # include_dirs=[], library_dirs=[], extra_link_args=[], extra_compile_args=[]), - #Extension("", ["./storm_analysis/sa_library/grid.c"], - # include_dirs=[], library_dirs=[], extra_link_args=[], extra_compile_args=[]), - Extension("", ["./storm_analysis/sa_library/multi_fit.c"], - include_dirs=[], library_dirs=[], extra_link_args=[], extra_compile_args=[]), - #Extension("", ["./storm_analysis/sa_library/ia_utilities.c"], - # include_dirs=[], library_dirs=[], extra_link_args=[], extra_compile_args=[]), - #Extension("", ["./storm_analysis/dbscan/dbscan.c"], - # include_dirs=[], library_dirs=[], extra_link_args=[], extra_compile_args=[]), - #Extension("", ["./storm_analysis/dbscan/kdtree.c"], - # include_dirs=[], library_dirs=[], extra_link_args=[], extra_compile_args=[]), - #Extension("", ["./storm_analysis/decon_storm/mlem_sparse.c"], - # include_dirs=[], library_dirs=[], extra_link_args=[], extra_compile_args=[]), - #Extension("", ["./storm_analysis/sCMOS/scmos_utilities.c"], - # include_dirs=[], library_dirs=[], extra_link_args=[], extra_compile_args=[]), - #Extension("", ["./storm_analysis/L1H/fista_lib.c"], - # include_dirs=[], library_dirs=[], extra_link_args=[], extra_compile_args=[]), - #Extension("", ["./storm_analysis/L1H/homotopy_storm.c"], - # include_dirs=[], library_dirs=[], extra_link_args=[], extra_compile_args=[]), - #Extension("", ["./storm_analysis/L1H/homotopy_sse.c"], - # 
include_dirs=[], library_dirs=[], extra_link_args=[], extra_compile_args=[]), - #Extension("", ["./storm_analysis/L1H/homotopy_general.c"], - # include_dirs=[], library_dirs=[], extra_link_args=[], extra_compile_args=[]), - #Extension("", ["./storm_analysis/L1H/homotopy_imagea.c"], - # include_dirs=[], library_dirs=[], extra_link_args=[], extra_compile_args=[]), - #Extension("", ["./storm_analysis/L1H/homotopy_common.c"], - # include_dirs=[], library_dirs=[], extra_link_args=[], extra_compile_args=[]), - #Extension("", ["./storm_analysis/L1H/homotopy_imagea_common.c"], - # include_dirs=[], library_dirs=[], extra_link_args=[], extra_compile_args=[]), - #Extension("", ["./storm_analysis/L1H/homotopy_gpu.c"], - # include_dirs=[], library_dirs=[], extra_link_args=[], extra_compile_args=[]), - #Extension("", ["./storm_analysis/sa_utilities/fitz.c"], - # include_dirs=[], library_dirs=[], extra_link_args=[], extra_compile_args=[]), - #Extension("", ["./storm_analysis/sa_utilities/tracker.c"], - # include_dirs=[], library_dirs=[], extra_link_args=[], extra_compile_args=[]), - #Extension("", ["./storm_analysis/sa_utilities/avemlist.c"], - # include_dirs=[], library_dirs=[], extra_link_args=[], extra_compile_args=[]), - #Extension("", ["./storm_analysis/sa_utilities/apply-drift-correction.c"], - # include_dirs=[], library_dirs=[], extra_link_args=[], extra_compile_args=[]), - #Extension("", ["./storm_analysis/frc/frc.c"], - # include_dirs=[], library_dirs=[], extra_link_args=[], extra_compile_args=[]), - #Extension("", ["./storm_analysis/simulator/draw_gaussians.c"], - # include_dirs=[], library_dirs=[], extra_link_args=[], extra_compile_args=[]), - #Extension("", ["./storm_analysis/simulator/zernike.c"], - # include_dirs=[], library_dirs=[], extra_link_args=[], extra_compile_args=[]), - #Extension("", ["./storm_analysis/spliner/cubic_spline.c"], - # include_dirs=[], library_dirs=[], extra_link_args=[], extra_compile_args=[]), - #Extension("", 
["./storm_analysis/spliner/multi_fit_core.c"], - # include_dirs=[], library_dirs=[], extra_link_args=[], extra_compile_args=[]), - #Extension("", ["./storm_analysis/spliner/cubic_fit.c"], - # include_dirs=[], library_dirs=[], extra_link_args=[], extra_compile_args=[]), - #Extension("", ["./storm_analysis/rolling_ball_bgr/rolling_ball_lib.c"], - # include_dirs=[], library_dirs=[], extra_link_args=[], extra_compile_args=[]), + extensions = [#Extension("", ["./storm_analysis/fista/fista_decon_utilities.c"], ), + #Extension("", ["./storm_analysis/fista/fista_fft.c"], ), + #Extension("", ["./storm_analysis/sa_library/matched_filter.c"], ), + Extension("storm_analysis.sa_library._grid", ["./storm_analysis/sa_library/grid.c"], ), + Extension("storm_analysis.sa_library._multi_fit", ["./storm_analysis/sa_library/multi_fit.c"], + libraries=["lapack"]), + Extension("storm_analysis.sa_library._ia_utilities", ["./storm_analysis/sa_library/ia_utilities.c"]), + #Extension("", ["./storm_analysis/dbscan/dbscan.c"], ), + #Extension("", ["./storm_analysis/dbscan/kdtree.c"], ), + #Extension("", ["./storm_analysis/decon_storm/mlem_sparse.c"], ), + #Extension("", ["./storm_analysis/sCMOS/scmos_utilities.c"], ), + #Extension("", ["./storm_analysis/L1H/fista_lib.c"], ), + #Extension("", ["./storm_analysis/L1H/homotopy_storm.c"], ), + #Extension("", ["./storm_analysis/L1H/homotopy_sse.c"], ), + #Extension("", ["./storm_analysis/L1H/homotopy_general.c"], ), + #Extension("", ["./storm_analysis/L1H/homotopy_imagea.c"], ), + #Extension("", ["./storm_analysis/L1H/homotopy_common.c"], ), + #Extension("", ["./storm_analysis/L1H/homotopy_imagea_common.c"], ), + #Extension("", ["./storm_analysis/L1H/homotopy_gpu.c"], ), + #Extension("", ["./storm_analysis/sa_utilities/fitz.c"], ), + #Extension("", ["./storm_analysis/sa_utilities/tracker.c"], ), + #Extension("", ["./storm_analysis/sa_utilities/avemlist.c"], ), + #Extension("", ["./storm_analysis/sa_utilities/apply-drift-correction.c"], ), + 
#Extension("", ["./storm_analysis/frc/frc.c"], ), + #Extension("", ["./storm_analysis/simulator/draw_gaussians.c"], ), + #Extension("", ["./storm_analysis/simulator/zernike.c"], ), + #Extension("", ["./storm_analysis/spliner/cubic_spline.c"], ), + #Extension("", ["./storm_analysis/spliner/multi_fit_core.c"], ), + #Extension("", ["./storm_analysis/spliner/cubic_fit.c"], ), + #Extension("", ["./storm_analysis/rolling_ball_bgr/rolling_ball_lib.c"], ), ] - return extensions + return extensions setup( name='storm_analysis', diff --git a/storm_analysis/sa_library/grid_c.py b/storm_analysis/sa_library/grid_c.py index 94264bca..2e1953be 100644 --- a/storm_analysis/sa_library/grid_c.py +++ b/storm_analysis/sa_library/grid_c.py @@ -15,7 +15,7 @@ import storm_analysis.sa_library.loadclib as loadclib -grid = loadclib.loadCLibrary(os.path.dirname(__file__), "grid") +grid = loadclib.loadCLibrary("storm_analysis.sa_library", "_grid") # Function specifications grid.grid2D.argtypes = [ndpointer(dtype=numpy.int32), diff --git a/storm_analysis/sa_library/ia_utilities_c.py b/storm_analysis/sa_library/ia_utilities_c.py index 9550cdcf..256675d2 100644 --- a/storm_analysis/sa_library/ia_utilities_c.py +++ b/storm_analysis/sa_library/ia_utilities_c.py @@ -13,7 +13,7 @@ import storm_analysis.sa_library.loadclib as loadclib -util = loadclib.loadCLibrary(os.path.dirname(__file__), "ia_utilities") +util = loadclib.loadCLibrary("storm_analysis.sa_library", "_ia_utilities") # C interface definition util.findLocalMaxima.argtypes = [ndpointer(dtype=numpy.float64), diff --git a/storm_analysis/sa_library/loadclib.py b/storm_analysis/sa_library/loadclib.py index 2bc50e3f..d8031b2b 100644 --- a/storm_analysis/sa_library/loadclib.py +++ b/storm_analysis/sa_library/loadclib.py @@ -7,18 +7,38 @@ import ctypes import sys +import os +import re -def loadCLibrary(directory, library): +import storm_analysis - if (directory == ""): - directory = "./" - else: - directory += "/" - if (sys.platform == "win32"): 
- return ctypes.cdll.LoadLibrary(directory + library + ".dll") - else: - return ctypes.cdll.LoadLibrary(directory + library + ".so") +def loadCLibrary(package, library): + + # Something like /usr/lib/python3.5/site-packages/storm_analysis + module_path = os.path.dirname(os.path.dirname(os.path.abspath(storm_analysis.__file__))) + + # Something like /usr/lib/python3.5/site-packages/storm_analysis/sa_library/ + lib_path = os.path.join(module_path, package.replace(".", os.path.sep)) + files = os.listdir(lib_path) + + lib_extension = "so" + if (sys.platform == "win32"): + lib_extension = "dll" + + # Something like '_ia_utilities.*\.so' + r = re.compile('{}.*\.{}'.format(library, lib_extension)) + + lib_filename = list(filter(r.match, files)) + + if len(lib_filename) < 1: + raise Exception("Can't find the library {} in the module {} " + "located in the storm_analysis package at {}".format(library, package, module_path)) + + # Something like _ia_utilities.cpython-35m-x86_64-linux-gnu.so + lib_filename = lib_filename[0] + + return ctypes.cdll.LoadLibrary(os.path.join(lib_path, lib_filename)) # # The MIT License diff --git a/storm_analysis/sa_library/multi_fit_c.py b/storm_analysis/sa_library/multi_fit_c.py index 96990e3a..132ff5e8 100644 --- a/storm_analysis/sa_library/multi_fit_c.py +++ b/storm_analysis/sa_library/multi_fit_c.py @@ -29,7 +29,7 @@ import storm_analysis.sa_library.ia_utilities_c as util_c import storm_analysis.sa_library.loadclib as loadclib -multi = loadclib.loadCLibrary(os.path.dirname(__file__), "multi_fit") +multi = loadclib.loadCLibrary("storm_analysis.sa_library", "_multi_fit") # C interface definition multi.getError.restype = c_double From b73bf7eeec269fc77e987e73720343e2969836dd Mon Sep 17 00:00:00 2001 From: Hadrien Mary Date: Thu, 27 Oct 2016 11:27:16 -0400 Subject: [PATCH 05/17] More extension --- setup.py | 29 ++++++++++++++++--- storm_analysis/sa_library/matched_filter_c.py | 2 +- 2 files changed, 26 insertions(+), 5 deletions(-) diff --git 
a/setup.py b/setup.py index 893b879b..fcd82c6d 100644 --- a/setup.py +++ b/setup.py @@ -1,6 +1,10 @@ #!/usr/bin/env python # -*- coding: utf-8 -*- +import platform +import os +import sys + from setuptools import setup, find_packages from distutils.core import Extension @@ -11,13 +15,30 @@ long_description = "" def get_c_extensions(): + + include_dirs = [os.path.join(sys.prefix, "include")] + library_dirs = [] + + if platform.system() == 'Windows': + include_dirs += [os.environ['LIBRARY_INC']] + library_dirs += [os.environ['LIBRARY_LIB']] + elif platform.system() == 'Linux': + include_dirs += [] + library_dirs += [] + elif platform.system() == 'Darwin': + include_dirs += [] + library_dirs += [] + extensions = [#Extension("", ["./storm_analysis/fista/fista_decon_utilities.c"], ), #Extension("", ["./storm_analysis/fista/fista_fft.c"], ), - #Extension("", ["./storm_analysis/sa_library/matched_filter.c"], ), - Extension("storm_analysis.sa_library._grid", ["./storm_analysis/sa_library/grid.c"], ), + Extension("storm_analysis.sa_library._matched_filter", ["./storm_analysis/sa_library/matched_filter.c"], + libraries=library_dirs + ["fftw3"], include_dirs=include_dirs + []), + Extension("storm_analysis.sa_library._grid", ["./storm_analysis/sa_library/grid.c"], + libraries=library_dirs, include_dirs=include_dirs), Extension("storm_analysis.sa_library._multi_fit", ["./storm_analysis/sa_library/multi_fit.c"], - libraries=["lapack"]), - Extension("storm_analysis.sa_library._ia_utilities", ["./storm_analysis/sa_library/ia_utilities.c"]), + libraries=library_dirs + ["lapack"], include_dirs=include_dirs + []), + Extension("storm_analysis.sa_library._ia_utilities", ["./storm_analysis/sa_library/ia_utilities.c"], + libraries=library_dirs, include_dirs=include_dirs), #Extension("", ["./storm_analysis/dbscan/dbscan.c"], ), #Extension("", ["./storm_analysis/dbscan/kdtree.c"], ), #Extension("", ["./storm_analysis/decon_storm/mlem_sparse.c"], ), diff --git 
a/storm_analysis/sa_library/matched_filter_c.py b/storm_analysis/sa_library/matched_filter_c.py index 24fe7d07..fffc7407 100644 --- a/storm_analysis/sa_library/matched_filter_c.py +++ b/storm_analysis/sa_library/matched_filter_c.py @@ -13,7 +13,7 @@ import storm_analysis.sa_library.loadclib as loadclib import storm_analysis.sa_library.recenter_psf as recenterPSF -m_filter = loadclib.loadCLibrary(os.path.dirname(__file__), "matched_filter") +m_filter = loadclib.loadCLibrary("storm_analysis.sa_library", "_matched_filter") m_filter.cleanup.argtypes = [ctypes.c_void_p] m_filter.convolve.argtypes = [ctypes.c_void_p, From 82b4c341e93d04a7ead42d69f5eed4b3f1a68b05 Mon Sep 17 00:00:00 2001 From: Hadrien Mary Date: Thu, 27 Oct 2016 12:10:54 -0400 Subject: [PATCH 06/17] More extensions --- setup.py | 87 +++++++++++++------ storm_analysis/L1H/homotopy_c.py | 2 +- storm_analysis/L1H/homotopy_imagea_c.py | 2 +- storm_analysis/dbscan/dbscan_c.py | 4 +- storm_analysis/decon_storm/mlem_c.py | 2 +- .../fista/fista_decon_utilities_c.py | 2 +- storm_analysis/fista/fista_fft_c.py | 2 +- storm_analysis/frc/frc_c.py | 2 +- .../rolling_ball_bgr/rolling_ball_lib_c.py | 2 +- storm_analysis/sCMOS/scmos_utilities_c.py | 2 +- storm_analysis/simulator/drawgaussians.py | 2 +- storm_analysis/simulator/zernike_c.py | 2 +- storm_analysis/spliner/cubic_fit_c.py | 2 +- storm_analysis/spliner/cubic_spline_c.py | 6 +- 14 files changed, 77 insertions(+), 42 deletions(-) diff --git a/setup.py b/setup.py index fcd82c6d..ab948ffe 100644 --- a/setup.py +++ b/setup.py @@ -29,8 +29,11 @@ def get_c_extensions(): include_dirs += [] library_dirs += [] - extensions = [#Extension("", ["./storm_analysis/fista/fista_decon_utilities.c"], ), - #Extension("", ["./storm_analysis/fista/fista_fft.c"], ), + extensions = [Extension("storm_analysis.fista._fista_decon_utilities", ["./storm_analysis/fista/fista_decon_utilities.c"], + libraries=library_dirs, include_dirs=include_dirs), + 
Extension("storm_analysis.fista._fista_fft", ["./storm_analysis/fista/fista_fft.c"], + libraries=library_dirs + ["fftw3"], include_dirs=include_dirs), + Extension("storm_analysis.sa_library._matched_filter", ["./storm_analysis/sa_library/matched_filter.c"], libraries=library_dirs + ["fftw3"], include_dirs=include_dirs + []), Extension("storm_analysis.sa_library._grid", ["./storm_analysis/sa_library/grid.c"], @@ -39,31 +42,63 @@ def get_c_extensions(): libraries=library_dirs + ["lapack"], include_dirs=include_dirs + []), Extension("storm_analysis.sa_library._ia_utilities", ["./storm_analysis/sa_library/ia_utilities.c"], libraries=library_dirs, include_dirs=include_dirs), - #Extension("", ["./storm_analysis/dbscan/dbscan.c"], ), - #Extension("", ["./storm_analysis/dbscan/kdtree.c"], ), - #Extension("", ["./storm_analysis/decon_storm/mlem_sparse.c"], ), - #Extension("", ["./storm_analysis/sCMOS/scmos_utilities.c"], ), - #Extension("", ["./storm_analysis/L1H/fista_lib.c"], ), - #Extension("", ["./storm_analysis/L1H/homotopy_storm.c"], ), - #Extension("", ["./storm_analysis/L1H/homotopy_sse.c"], ), - #Extension("", ["./storm_analysis/L1H/homotopy_general.c"], ), - #Extension("", ["./storm_analysis/L1H/homotopy_imagea.c"], ), - #Extension("", ["./storm_analysis/L1H/homotopy_common.c"], ), - #Extension("", ["./storm_analysis/L1H/homotopy_imagea_common.c"], ), - #Extension("", ["./storm_analysis/L1H/homotopy_gpu.c"], ), - #Extension("", ["./storm_analysis/sa_utilities/fitz.c"], ), - #Extension("", ["./storm_analysis/sa_utilities/tracker.c"], ), - #Extension("", ["./storm_analysis/sa_utilities/avemlist.c"], ), - #Extension("", ["./storm_analysis/sa_utilities/apply-drift-correction.c"], ), - #Extension("", ["./storm_analysis/frc/frc.c"], ), - #Extension("", ["./storm_analysis/simulator/draw_gaussians.c"], ), - #Extension("", ["./storm_analysis/simulator/zernike.c"], ), - #Extension("", ["./storm_analysis/spliner/cubic_spline.c"], ), - #Extension("", 
["./storm_analysis/spliner/multi_fit_core.c"], ), - #Extension("", ["./storm_analysis/spliner/cubic_fit.c"], ), - #Extension("", ["./storm_analysis/rolling_ball_bgr/rolling_ball_lib.c"], ), + + Extension("storm_analysis.dbscan._dbscan", ["./storm_analysis/dbscan/dbscan.c", + "./storm_analysis/dbscan/kdtree.c"], + libraries=library_dirs, include_dirs=include_dirs), + + Extension("storm_analysis.decon_storm._mlem_sparse", ["./storm_analysis/decon_storm/mlem_sparse.c"], + libraries=library_dirs, include_dirs=include_dirs), + + Extension("storm_analysis.sCMOS._scmos_utilities", ["./storm_analysis/sCMOS/scmos_utilities.c"], + libraries=library_dirs, include_dirs=include_dirs), + + Extension("storm_analysis.frc._frc", ["./storm_analysis/frc/frc.c"], + libraries=library_dirs, include_dirs=include_dirs), + + Extension("storm_analysis.simulator._draw_gaussians", ["./storm_analysis/simulator/draw_gaussians.c"], + libraries=library_dirs, include_dirs=include_dirs), + Extension("storm_analysis.simulator._zernike", ["./storm_analysis/simulator/zernike.c"], + libraries=library_dirs, include_dirs=include_dirs), + + Extension("storm_analysis.spliner._cubic_spline", ["./storm_analysis/spliner/cubic_spline.c"], + libraries=library_dirs, include_dirs=include_dirs), + Extension("storm_analysis.spliner._cubic_fit", ["./storm_analysis/spliner/cubic_fit.c", + "./storm_analysis/spliner/multi_fit_core.c", + "./storm_analysis/spliner/cubic_spline.c"], + libraries=library_dirs + ["lapack"], include_dirs=include_dirs), + + Extension("storm_analysis.rolling_ball_bgr._rolling_ball_lib", ["./storm_analysis/rolling_ball_bgr/rolling_ball_lib.c"], + libraries=library_dirs, include_dirs=include_dirs), + + # Executables : + # It's hard to integrate executables into a Python package + # The best way to integrate them would be to convert them into libraries. 
+ # Extension("", ["./storm_analysis/sa_utilities/fitz.c"]), + # Extension("", ["./storm_analysis/sa_utilities/tracker.c"]), + # Extension("", ["./storm_analysis/sa_utilities/avemlist.c"]), + # Extension("", ["./storm_analysis/sa_utilities/apply-drift-correction.c"]), ] + if platform.system() == 'Windows': + extensions += [Extension("storm_analysis.L1H._fista_lib", ["./storm_analysis/L1H/fista_lib.c"], + libraries=library_dirs, include_dirs=include_dirs), + Extension("storm_analysis.L1H._homotopy_storm", ["./storm_analysis/L1H/homotopy_storm.c"], + libraries=library_dirs, include_dirs=include_dirs), + Extension("storm_analysis.L1H._homotopy_sse", ["./storm_analysis/L1H/homotopy_sse.c"], + libraries=library_dirs, include_dirs=include_dirs), + Extension("storm_analysis.L1H._homotopy_general", ["./storm_analysis/L1H/homotopy_general.c"], + libraries=library_dirs, include_dirs=include_dirs), + Extension("storm_analysis.L1H._homotopy_imagea", ["./storm_analysis/L1H/homotopy_imagea.c"], + libraries=library_dirs, include_dirs=include_dirs), + Extension("storm_analysis.L1H._homotopy_common", ["./storm_analysis/L1H/homotopy_common.c"], + libraries=library_dirs, include_dirs=include_dirs), + Extension("storm_analysis.L1H._homotopy_imagea_common", ["./storm_analysis/L1H/homotopy_imagea_common.c"], + libraries=library_dirs, include_dirs=include_dirs), + Extension("storm_analysis.L1H._homotopy_gpu", ["./storm_analysis/L1H/homotopy_gpu.c"], + libraries=library_dirs, include_dirs=include_dirs), + ] + return extensions setup( @@ -87,7 +122,7 @@ def get_c_extensions(): #'hello': ['*.msg'], }, exclude_package_data={ - #'': ['README.txt'] + '': ['*README.txt', '*README.md', '*compile*.sh', '*compile*.bat'] }, include_package_data=True, diff --git a/storm_analysis/L1H/homotopy_c.py b/storm_analysis/L1H/homotopy_c.py index fb14c30f..5b9f1f6d 100644 --- a/storm_analysis/L1H/homotopy_c.py +++ b/storm_analysis/L1H/homotopy_c.py @@ -21,7 +21,7 @@ def setCInterface(homotopy_lib): global 
homotopy - homotopy = loadclib.loadCLibrary(os.path.dirname(__file__), homotopy_lib) + homotopy = loadclib.loadCLibrary("storm_analysis.L1H", "_" + homotopy_lib) l1flt_size = homotopy.getL1FLTSize() if(l1flt_size == 4): diff --git a/storm_analysis/L1H/homotopy_imagea_c.py b/storm_analysis/L1H/homotopy_imagea_c.py index 1d1ef99c..ce3e0cbd 100644 --- a/storm_analysis/L1H/homotopy_imagea_c.py +++ b/storm_analysis/L1H/homotopy_imagea_c.py @@ -22,7 +22,7 @@ def setCInterface(homotopy_ia_lib): global homotopyIa - homotopyIa = loadclib.loadCLibrary(os.path.dirname(__file__), homotopy_ia_lib) + homotopyIa = loadclib.loadCLibrary("storm_analysis.L1H", "_" + homotopy_ia_lib) # Check that C libraries were compiled as expected. l1flt_size = homotopyIa.getL1FLTSize() diff --git a/storm_analysis/dbscan/dbscan_c.py b/storm_analysis/dbscan/dbscan_c.py index 870d2c25..69a17bc7 100644 --- a/storm_analysis/dbscan/dbscan_c.py +++ b/storm_analysis/dbscan/dbscan_c.py @@ -17,7 +17,7 @@ import storm_analysis.sa_library.loadclib as loadclib -lib_dbscan = loadclib.loadCLibrary(os.path.dirname(__file__), "dbscan") +lib_dbscan = loadclib.loadCLibrary("storm_analysis.dbscan", "_dbscan") lib_dbscan.dbscan.argtypes = [ndpointer(dtype=numpy.float32), ndpointer(dtype=numpy.float32), @@ -72,7 +72,7 @@ def dbscan(x, y, z, c, eps, min_points, z_factor = 0.5, verbose = True): # Print number of clusters if verbose: n_clusters_ = len(set(c_l)) - (1 if -1 in c_l else 0) - print 'Estimated number of clusters: %d' % n_clusters_ + print('Estimated number of clusters: %d' % n_clusters_) return c_l diff --git a/storm_analysis/decon_storm/mlem_c.py b/storm_analysis/decon_storm/mlem_c.py index 8817ce79..36df20b9 100644 --- a/storm_analysis/decon_storm/mlem_c.py +++ b/storm_analysis/decon_storm/mlem_c.py @@ -28,7 +28,7 @@ c_double_p = POINTER(c_double) # Load mlem C library -mlem = loadclib.loadCLibrary("mlem_sparse") +mlem = loadclib.loadCLibrary("storm_analysis.decon_storm", "_mlem_sparse") # Define structures 
class GAUSS(Structure): diff --git a/storm_analysis/fista/fista_decon_utilities_c.py b/storm_analysis/fista/fista_decon_utilities_c.py index 83ac1f19..3c316e53 100644 --- a/storm_analysis/fista/fista_decon_utilities_c.py +++ b/storm_analysis/fista/fista_decon_utilities_c.py @@ -13,7 +13,7 @@ import storm_analysis.sa_library.loadclib as loadclib -fd_util = loadclib.loadCLibrary(os.path.dirname(__file__), "fista_decon_utilities") +fd_util = loadclib.loadCLibrary("storm_analysis.fista", "_fista_decon_utilities") # C interface definition fd_util.label.argtypes = [ndpointer(dtype=numpy.float64), diff --git a/storm_analysis/fista/fista_fft_c.py b/storm_analysis/fista/fista_fft_c.py index 0b2af0bb..caeb4912 100644 --- a/storm_analysis/fista/fista_fft_c.py +++ b/storm_analysis/fista/fista_fft_c.py @@ -16,7 +16,7 @@ import storm_analysis.fista.fista_3d as fista3D -fista_fft = loadclib.loadCLibrary(os.path.dirname(__file__), "fista_fft") +fista_fft = loadclib.loadCLibrary("storm_analysis.fista", "_fista_fft") # C interface definition fista_fft.getXVector.argtypes = [ndpointer(dtype=numpy.float64)] diff --git a/storm_analysis/frc/frc_c.py b/storm_analysis/frc/frc_c.py index 54ed88e6..40463ddc 100644 --- a/storm_analysis/frc/frc_c.py +++ b/storm_analysis/frc/frc_c.py @@ -13,7 +13,7 @@ import storm_analysis.sa_library.loadclib as loadclib -frc_lib = loadclib.loadCLibrary(os.path.dirname(__file__), "frc") +frc_lib = loadclib.loadCLibrary("storm_analysis.frc", "_frc") # Function specifications. 
frc_lib.calc_frc.argtypes = [ndpointer(dtype=numpy.complex128), diff --git a/storm_analysis/rolling_ball_bgr/rolling_ball_lib_c.py b/storm_analysis/rolling_ball_bgr/rolling_ball_lib_c.py index 4db290bb..ade3e8a8 100644 --- a/storm_analysis/rolling_ball_bgr/rolling_ball_lib_c.py +++ b/storm_analysis/rolling_ball_bgr/rolling_ball_lib_c.py @@ -18,7 +18,7 @@ import storm_analysis.sa_library.loadclib as loadclib -rball = loadclib.loadCLibrary(os.path.dirname(__file__), "rolling_ball_lib") +rball = loadclib.loadCLibrary("storm_analysis.rolling_ball_bgr", "_rolling_ball_lib") # C interface definition rball.estimateBg.argtypes = [ndpointer(dtype=numpy.float64), diff --git a/storm_analysis/sCMOS/scmos_utilities_c.py b/storm_analysis/sCMOS/scmos_utilities_c.py index 4c1e4013..9ad2bb1c 100644 --- a/storm_analysis/sCMOS/scmos_utilities_c.py +++ b/storm_analysis/sCMOS/scmos_utilities_c.py @@ -18,7 +18,7 @@ import storm_analysis.sa_library.loadclib as loadclib -slib = loadclib.loadCLibrary(os.path.dirname(__file__), "scmos_utilities") +slib = loadclib.loadCLibrary("storm_analysis.sCMOS", "_scmos_utilities") # C interface definition. 
slib.deregularize.argtypes = [ndpointer(dtype=numpy.float64), diff --git a/storm_analysis/simulator/drawgaussians.py b/storm_analysis/simulator/drawgaussians.py index 34e18615..5fe00980 100644 --- a/storm_analysis/simulator/drawgaussians.py +++ b/storm_analysis/simulator/drawgaussians.py @@ -13,7 +13,7 @@ import storm_analysis.sa_library.loadclib as loadclib -drawgauss = loadclib.loadCLibrary(os.path.dirname(__file__), "draw_gaussians") +drawgauss = loadclib.loadCLibrary("storm_analysis.simulator", "_draw_gaussians") drawgauss.drawGaussians.argtypes = [ndpointer(dtype = numpy.float64), ndpointer(dtype = numpy.float64), diff --git a/storm_analysis/simulator/zernike_c.py b/storm_analysis/simulator/zernike_c.py index a46aee6f..a5c17dfc 100644 --- a/storm_analysis/simulator/zernike_c.py +++ b/storm_analysis/simulator/zernike_c.py @@ -13,7 +13,7 @@ import storm_analysis.sa_library.loadclib as loadclib -zernike = loadclib.loadCLibrary(os.path.dirname(__file__), "zernike") +zernike = loadclib.loadCLibrary("storm_analysis.simulator", "_zernike") zernike.zernike.argtypes = [ctypes.c_int, ctypes.c_int, diff --git a/storm_analysis/spliner/cubic_fit_c.py b/storm_analysis/spliner/cubic_fit_c.py index f509c3ea..be6c30ad 100644 --- a/storm_analysis/spliner/cubic_fit_c.py +++ b/storm_analysis/spliner/cubic_fit_c.py @@ -17,7 +17,7 @@ import storm_analysis.spliner.spline3D as spline3D # Load the library. -cubic_fit = loadclib.loadCLibrary(os.path.dirname(__file__), "cubic_fit") +cubic_fit = loadclib.loadCLibrary("storm_analysis.spliner", "_cubic_fit") # C interface definition. 
cubic_fit.fSpline2D.argtypes = [ctypes.c_double, diff --git a/storm_analysis/spliner/cubic_spline_c.py b/storm_analysis/spliner/cubic_spline_c.py index b94369c0..57e34a92 100644 --- a/storm_analysis/spliner/cubic_spline_c.py +++ b/storm_analysis/spliner/cubic_spline_c.py @@ -13,13 +13,13 @@ import random import sys -import spline2D -import spline3D +from storm_analysis.spliner.spline2D import Spline2D +from storm_analysis.spliner.spline3D import Spline3D import storm_analysis.sa_library.loadclib as loadclib # Load the library. -cubic = loadclib.loadCLibrary(os.path.dirname(__file__), "cubic_spline") +cubic = loadclib.loadCLibrary("storm_analysis.spliner", "_cubic_spline") # C interface definition. cubic.computeDelta2D.argtypes = [ctypes.c_double, From bbce626132a52007ee553b70f276c73e659cdd22 Mon Sep 17 00:00:00 2001 From: Hadrien Mary Date: Thu, 27 Oct 2016 12:12:03 -0400 Subject: [PATCH 07/17] Add test.py --- test.py | 41 +++++++++++++++++++++++++++++++++++++++++ 1 file changed, 41 insertions(+) create mode 100644 test.py diff --git a/test.py b/test.py new file mode 100644 index 00000000..15531f56 --- /dev/null +++ b/test.py @@ -0,0 +1,41 @@ +# Test every extensions have been correctly build + +import platform + +# Try to import all C extension +import storm_analysis.sa_library.ia_utilities_c +import storm_analysis.sa_library.multi_fit_c +import storm_analysis.sa_library.grid_c +import storm_analysis.sa_library.matched_filter_c + +import storm_analysis.frc.frc_c + +import storm_analysis.fista.fista_decon_utilities_c +import storm_analysis.fista.fista_fft_c + +import storm_analysis.fista.fista_fft_c + +import storm_analysis.dbscan.dbscan_c + +import storm_analysis.decon_storm.mlem_c + +import storm_analysis.sCMOS.scmos_utilities_c + +import storm_analysis.simulator.zernike_c +import storm_analysis.simulator.drawgaussians + +import storm_analysis.spliner.cubic_spline_c +import storm_analysis.spliner.cubic_fit_c + +import 
storm_analysis.rolling_ball_bgr.rolling_ball_lib_c + +if platform.system() == 'Windows': + import storm_analysis.sa_library.loadclib as loadclib + loadclib.loadCLibrary("storm_analysis.L1H", "_fista_lib") + loadclib.loadCLibrary("storm_analysis.L1H", "_homotopy_storm") + loadclib.loadCLibrary("storm_analysis.L1H", "_homotopy_sse") + loadclib.loadCLibrary("storm_analysis.L1H", "_homotopy_general") + loadclib.loadCLibrary("storm_analysis.L1H", "_homotopy_imagea") + loadclib.loadCLibrary("storm_analysis.L1H", "_homotopy_common") + loadclib.loadCLibrary("storm_analysis.L1H", "_homotopy_imagea_common") + loadclib.loadCLibrary("storm_analysis.L1H", "_homotopy_gpu") \ No newline at end of file From db0f935a0b45cd398c133d84ee31b4bb321ce2ba Mon Sep 17 00:00:00 2001 From: Hadrien Mary Date: Thu, 27 Oct 2016 12:23:56 -0400 Subject: [PATCH 08/17] Add tests --- .gitignore | 1 + setup.py | 3 ++ test.py | 41 --------------------------- tests/test_c_extensions_import.py | 46 +++++++++++++++++++++++++++++++ 4 files changed, 50 insertions(+), 41 deletions(-) delete mode 100644 test.py create mode 100644 tests/test_c_extensions_import.py diff --git a/.gitignore b/.gitignore index 17aee38f..aef4dc40 100644 --- a/.gitignore +++ b/.gitignore @@ -5,6 +5,7 @@ *.py[co] *.egg-info build/ +*.eggs ############# ## Emacs diff --git a/setup.py b/setup.py index ab948ffe..bf3b9c36 100644 --- a/setup.py +++ b/setup.py @@ -127,6 +127,9 @@ def get_c_extensions(): include_package_data=True, requires=['numpy (>=1.8.2)', 'setuptools'], + + setup_requires=['pytest-runner'], + tests_require=['pytest'], license="", keywords='storm,microscopy', diff --git a/test.py b/test.py deleted file mode 100644 index 15531f56..00000000 --- a/test.py +++ /dev/null @@ -1,41 +0,0 @@ -# Test every extensions have been correctly build - -import platform - -# Try to import all C extension -import storm_analysis.sa_library.ia_utilities_c -import storm_analysis.sa_library.multi_fit_c -import storm_analysis.sa_library.grid_c 
-import storm_analysis.sa_library.matched_filter_c - -import storm_analysis.frc.frc_c - -import storm_analysis.fista.fista_decon_utilities_c -import storm_analysis.fista.fista_fft_c - -import storm_analysis.fista.fista_fft_c - -import storm_analysis.dbscan.dbscan_c - -import storm_analysis.decon_storm.mlem_c - -import storm_analysis.sCMOS.scmos_utilities_c - -import storm_analysis.simulator.zernike_c -import storm_analysis.simulator.drawgaussians - -import storm_analysis.spliner.cubic_spline_c -import storm_analysis.spliner.cubic_fit_c - -import storm_analysis.rolling_ball_bgr.rolling_ball_lib_c - -if platform.system() == 'Windows': - import storm_analysis.sa_library.loadclib as loadclib - loadclib.loadCLibrary("storm_analysis.L1H", "_fista_lib") - loadclib.loadCLibrary("storm_analysis.L1H", "_homotopy_storm") - loadclib.loadCLibrary("storm_analysis.L1H", "_homotopy_sse") - loadclib.loadCLibrary("storm_analysis.L1H", "_homotopy_general") - loadclib.loadCLibrary("storm_analysis.L1H", "_homotopy_imagea") - loadclib.loadCLibrary("storm_analysis.L1H", "_homotopy_common") - loadclib.loadCLibrary("storm_analysis.L1H", "_homotopy_imagea_common") - loadclib.loadCLibrary("storm_analysis.L1H", "_homotopy_gpu") \ No newline at end of file diff --git a/tests/test_c_extensions_import.py b/tests/test_c_extensions_import.py new file mode 100644 index 00000000..48b7c765 --- /dev/null +++ b/tests/test_c_extensions_import.py @@ -0,0 +1,46 @@ +# Test every extensions have been correctly build + +import platform + +def test_c_extensions_import(): + # Try to import all C extension + import storm_analysis.sa_library.ia_utilities_c + import storm_analysis.sa_library.multi_fit_c + import storm_analysis.sa_library.grid_c + import storm_analysis.sa_library.matched_filter_c + + import storm_analysis.frc.frc_c + + import storm_analysis.fista.fista_decon_utilities_c + import storm_analysis.fista.fista_fft_c + + import storm_analysis.fista.fista_fft_c + + import storm_analysis.dbscan.dbscan_c + 
+ import storm_analysis.decon_storm.mlem_c + + import storm_analysis.sCMOS.scmos_utilities_c + + import storm_analysis.simulator.zernike_c + import storm_analysis.simulator.drawgaussians + + import storm_analysis.spliner.cubic_spline_c + import storm_analysis.spliner.cubic_fit_c + + import storm_analysis.rolling_ball_bgr.rolling_ball_lib_c + + if platform.system() == 'Windows': + import storm_analysis.sa_library.loadclib as loadclib + loadclib.loadCLibrary("storm_analysis.L1H", "_fista_lib") + loadclib.loadCLibrary("storm_analysis.L1H", "_homotopy_storm") + loadclib.loadCLibrary("storm_analysis.L1H", "_homotopy_sse") + loadclib.loadCLibrary("storm_analysis.L1H", "_homotopy_general") + loadclib.loadCLibrary("storm_analysis.L1H", "_homotopy_imagea") + loadclib.loadCLibrary("storm_analysis.L1H", "_homotopy_common") + loadclib.loadCLibrary("storm_analysis.L1H", "_homotopy_imagea_common") + loadclib.loadCLibrary("storm_analysis.L1H", "_homotopy_gpu") + + +if __name__ == "__main__": + test_c_extensions_import() \ No newline at end of file From 9f6f749d3d2c219bec27a64b5d68b394a33e3444 Mon Sep 17 00:00:00 2001 From: Hadrien Mary Date: Thu, 27 Oct 2016 13:59:05 -0400 Subject: [PATCH 09/17] Remove setup.py deps --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index bf3b9c36..d2a9180b 100644 --- a/setup.py +++ b/setup.py @@ -126,7 +126,7 @@ def get_c_extensions(): }, include_package_data=True, - requires=['numpy (>=1.8.2)', 'setuptools'], + requires=[], setup_requires=['pytest-runner'], tests_require=['pytest'], From 28f14471ae0bb3543e11c046e57d459065be9557 Mon Sep 17 00:00:00 2001 From: Hadrien Mary Date: Thu, 27 Oct 2016 14:19:26 -0400 Subject: [PATCH 10/17] Add scipy and __init__.py --- .gitignore | 2 ++ storm_analysis/decon_storm/__init__.py | 1 + 2 files changed, 3 insertions(+) create mode 100644 storm_analysis/decon_storm/__init__.py diff --git a/.gitignore b/.gitignore index aef4dc40..6887c59b 100644 --- a/.gitignore 
+++ b/.gitignore @@ -6,6 +6,8 @@ *.egg-info build/ *.eggs +dist/ +.cache/ ############# ## Emacs diff --git a/storm_analysis/decon_storm/__init__.py b/storm_analysis/decon_storm/__init__.py new file mode 100644 index 00000000..013e4b7e --- /dev/null +++ b/storm_analysis/decon_storm/__init__.py @@ -0,0 +1 @@ +#!/usr/bin/python From 2355a6b72d4bd886fc4e8de554c4e9355f66a90d Mon Sep 17 00:00:00 2001 From: Hadrien Mary Date: Thu, 27 Oct 2016 14:34:22 -0400 Subject: [PATCH 11/17] Add setup.cfg --- setup.cfg | 2 ++ 1 file changed, 2 insertions(+) create mode 100644 setup.cfg diff --git a/setup.cfg b/setup.cfg new file mode 100644 index 00000000..9af7e6f1 --- /dev/null +++ b/setup.cfg @@ -0,0 +1,2 @@ +[aliases] +test=pytest \ No newline at end of file From 0cba908971b9c560157bee31608143e7db800244 Mon Sep 17 00:00:00 2001 From: Hadrien Mary Date: Thu, 27 Oct 2016 15:07:06 -0400 Subject: [PATCH 12/17] Move test file --- {tests => storm_analysis/tests}/test_c_extensions_import.py | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename {tests => storm_analysis/tests}/test_c_extensions_import.py (100%) diff --git a/tests/test_c_extensions_import.py b/storm_analysis/tests/test_c_extensions_import.py similarity index 100% rename from tests/test_c_extensions_import.py rename to storm_analysis/tests/test_c_extensions_import.py From c3e6279213ed2385c8537b0bf55524b831f1ae1d Mon Sep 17 00:00:00 2001 From: Hadrien Mary Date: Fri, 28 Oct 2016 09:02:34 -0400 Subject: [PATCH 13/17] Add data files --- setup.py | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/setup.py b/setup.py index d2a9180b..b5634023 100644 --- a/setup.py +++ b/setup.py @@ -116,13 +116,15 @@ def get_c_extensions(): ext_modules=get_c_extensions(), package_data={ #'sample': ['package_data.dat'], - # If any package contains *.txt or *.rst files, include them: - #'': ['*.txt', '*.rst'], - # And include any *.msg files found in the 'hello' package, too: - #'hello': ['*.msg'], + # README and 
LICENSE + '': ['*.txt', '*.md'], + # Matlab files + 'matlab': ['*.m', '*.mat'], + 'c_files': ['*.c', '*.h'], }, exclude_package_data={ - '': ['*README.txt', '*README.md', '*compile*.sh', '*compile*.bat'] + 'scripts': ['*.sh', '*.bat'], + 'executables': ['*.exe'], }, include_package_data=True, From a87e54824d489ecd53fd091e1c41e32ad424eec1 Mon Sep 17 00:00:00 2001 From: Hadrien Mary Date: Fri, 28 Oct 2016 09:08:51 -0400 Subject: [PATCH 14/17] Integrate sa_utilities extensions --- setup.py | 15 ++++++++------- .../sa_utilities/apply_drift_correction_c.py | 2 +- storm_analysis/sa_utilities/avemlist_c.py | 2 +- storm_analysis/sa_utilities/fitz_c.py | 2 +- storm_analysis/sa_utilities/tracker_c.py | 2 +- storm_analysis/tests/test_c_extensions_import.py | 6 ++++++ 6 files changed, 18 insertions(+), 11 deletions(-) diff --git a/setup.py b/setup.py index b5634023..266690b3 100644 --- a/setup.py +++ b/setup.py @@ -71,13 +71,14 @@ def get_c_extensions(): Extension("storm_analysis.rolling_ball_bgr._rolling_ball_lib", ["./storm_analysis/rolling_ball_bgr/rolling_ball_lib.c"], libraries=library_dirs, include_dirs=include_dirs), - # Executables : - # It's hard to integrate executables into a Python package - # The best way to integrate them would be to convert them into libraries. 
- # Extension("", ["./storm_analysis/sa_utilities/fitz.c"]), - # Extension("", ["./storm_analysis/sa_utilities/tracker.c"]), - # Extension("", ["./storm_analysis/sa_utilities/avemlist.c"]), - # Extension("", ["./storm_analysis/sa_utilities/apply-drift-correction.c"]), + Extension("storm_analysis.sa_utilities._fitz", ["./storm_analysis/sa_utilities/fitz.c"], + libraries=library_dirs, include_dirs=include_dirs), + Extension("storm_analysis.sa_utilities._tracker", ["./storm_analysis/sa_utilities/tracker.c"], + libraries=library_dirs, include_dirs=include_dirs), + Extension("storm_analysis.sa_utilities._avemlist", ["./storm_analysis/sa_utilities/avemlist.c"], + libraries=library_dirs, include_dirs=include_dirs), + Extension("storm_analysis.sa_utilities._apply-drift-correction", ["./storm_analysis/sa_utilities/apply-drift-correction.c"], + libraries=library_dirs, include_dirs=include_dirs), ] if platform.system() == 'Windows': diff --git a/storm_analysis/sa_utilities/apply_drift_correction_c.py b/storm_analysis/sa_utilities/apply_drift_correction_c.py index 1e35a3c3..a5acb515 100644 --- a/storm_analysis/sa_utilities/apply_drift_correction_c.py +++ b/storm_analysis/sa_utilities/apply_drift_correction_c.py @@ -11,7 +11,7 @@ import storm_analysis.sa_library.loadclib as loadclib -adc = loadclib.loadCLibrary(os.path.dirname(__file__), "apply-drift-correction") +adc = loadclib.loadCLibrary("storm_analysis.sa_utilities", "_apply-drift-correction") adc.applyDriftCorrection.argtypes = [ctypes.c_int, ctypes.c_void_p] diff --git a/storm_analysis/sa_utilities/avemlist_c.py b/storm_analysis/sa_utilities/avemlist_c.py index f1f7e63d..ec5fef1d 100644 --- a/storm_analysis/sa_utilities/avemlist_c.py +++ b/storm_analysis/sa_utilities/avemlist_c.py @@ -10,7 +10,7 @@ import storm_analysis.sa_library.loadclib as loadclib -c_avemlist = loadclib.loadCLibrary(os.path.dirname(__file__), "avemlist") +c_avemlist = loadclib.loadCLibrary("storm_analysis.sa_utilities", "_avemlist") 
c_avemlist.avemlist.argtypes = [ctypes.c_int, ctypes.c_void_p] diff --git a/storm_analysis/sa_utilities/fitz_c.py b/storm_analysis/sa_utilities/fitz_c.py index 36233b67..e8f48032 100644 --- a/storm_analysis/sa_utilities/fitz_c.py +++ b/storm_analysis/sa_utilities/fitz_c.py @@ -11,7 +11,7 @@ import storm_analysis.sa_library.loadclib as loadclib -c_fitz = loadclib.loadCLibrary(os.path.dirname(__file__), "fitz") +c_fitz = loadclib.loadCLibrary("storm_analysis.sa_utilities", "_fitz") c_fitz.fitz.argtypes = [ctypes.c_int, ctypes.c_void_p] diff --git a/storm_analysis/sa_utilities/tracker_c.py b/storm_analysis/sa_utilities/tracker_c.py index cf3f28f2..dcf9dc1d 100644 --- a/storm_analysis/sa_utilities/tracker_c.py +++ b/storm_analysis/sa_utilities/tracker_c.py @@ -11,7 +11,7 @@ import storm_analysis.sa_library.loadclib as loadclib -c_tracker = loadclib.loadCLibrary(os.path.dirname(__file__), "tracker") +c_tracker = loadclib.loadCLibrary("storm_analysis.sa_utilities", "_tracker") c_tracker.tracker.argtypes = [ctypes.c_int, ctypes.c_void_p] diff --git a/storm_analysis/tests/test_c_extensions_import.py b/storm_analysis/tests/test_c_extensions_import.py index 48b7c765..7052d8fb 100644 --- a/storm_analysis/tests/test_c_extensions_import.py +++ b/storm_analysis/tests/test_c_extensions_import.py @@ -4,6 +4,7 @@ def test_c_extensions_import(): # Try to import all C extension + import storm_analysis.sa_library.ia_utilities_c import storm_analysis.sa_library.multi_fit_c import storm_analysis.sa_library.grid_c @@ -30,6 +31,11 @@ def test_c_extensions_import(): import storm_analysis.rolling_ball_bgr.rolling_ball_lib_c + import storm_analysis.sa_utilities.fitz_c + import storm_analysis.sa_utilities.apply_drift_correction_c + import storm_analysis.sa_utilities.avemlist_c + import storm_analysis.sa_utilities.tracker_c + if platform.system() == 'Windows': import storm_analysis.sa_library.loadclib as loadclib loadclib.loadCLibrary("storm_analysis.L1H", "_fista_lib") From 
8b3da436ff79973e3326cd8c72c5eed87049e469 Mon Sep 17 00:00:00 2001 From: Hadrien Mary Date: Fri, 28 Oct 2016 09:24:19 -0400 Subject: [PATCH 15/17] Rename test and add MANIFEST.in --- MANIFEST.in | 12 ++++++++++++ setup.py | 12 +++++------- storm_analysis/{tests => test/data}/LICENSE.txt | 0 storm_analysis/{tests => test/data}/README.txt | 0 storm_analysis/{tests => test/data}/calib.npy | Bin storm_analysis/{tests => test/data}/results.txt | 0 storm_analysis/{tests => test/data}/test.dax | Bin storm_analysis/{tests => test/data}/test.inf | 0 storm_analysis/{tests => test/data}/test_3d_2d.xml | 0 .../{tests => test/data}/test_3d_2d_fixed.xml | 0 .../data}/test_3d_2d_fixed_low_snr.xml | 0 storm_analysis/{tests => test/data}/test_3d_3d.xml | 0 storm_analysis/{tests => test/data}/test_3d_Z.xml | 0 storm_analysis/{tests => test/data}/test_bg_sub.dax | Bin storm_analysis/{tests => test/data}/test_bg_sub.inf | 0 storm_analysis/{tests => test/data}/test_drift.xml | 0 .../{tests => test/data}/test_drift_mlist.bin | Bin storm_analysis/{tests => test/data}/test_l1h.dax | Bin storm_analysis/{tests => test/data}/test_l1h.inf | 0 storm_analysis/{tests => test/data}/test_l1h.xml | 0 .../{tests => test/data}/test_l1h_olist.bin | Bin .../{tests => test/data}/test_low_snr.dax | Bin .../{tests => test/data}/test_low_snr.inf | 0 .../{tests => test/data}/test_low_snr_list.bin | Bin storm_analysis/{tests => test/data}/test_olist.bin | Bin storm_analysis/{tests => test/data}/test_sc_2d.xml | 0 .../{tests => test/data}/test_sc_2d_fixed.xml | 0 storm_analysis/{tests => test/data}/test_sc_3d.xml | 0 storm_analysis/{tests => test/data}/test_sc_Z.xml | 0 .../{tests => test/data}/test_spliner.dax | Bin .../{tests => test/data}/test_spliner.inf | 0 .../{tests => test/data}/test_spliner_dh.xml | 0 .../{tests => test/data}/test_spliner_dh_fista.xml | 0 .../{tests => test/data}/test_spliner_olist.bin | Bin storm_analysis/{tests => test}/run_tests.py | 0 .../{tests => 
test}/test_c_extensions_import.py | 0 36 files changed, 17 insertions(+), 7 deletions(-) create mode 100644 MANIFEST.in rename storm_analysis/{tests => test/data}/LICENSE.txt (100%) rename storm_analysis/{tests => test/data}/README.txt (100%) rename storm_analysis/{tests => test/data}/calib.npy (100%) rename storm_analysis/{tests => test/data}/results.txt (100%) rename storm_analysis/{tests => test/data}/test.dax (100%) rename storm_analysis/{tests => test/data}/test.inf (100%) rename storm_analysis/{tests => test/data}/test_3d_2d.xml (100%) rename storm_analysis/{tests => test/data}/test_3d_2d_fixed.xml (100%) rename storm_analysis/{tests => test/data}/test_3d_2d_fixed_low_snr.xml (100%) rename storm_analysis/{tests => test/data}/test_3d_3d.xml (100%) rename storm_analysis/{tests => test/data}/test_3d_Z.xml (100%) rename storm_analysis/{tests => test/data}/test_bg_sub.dax (100%) rename storm_analysis/{tests => test/data}/test_bg_sub.inf (100%) rename storm_analysis/{tests => test/data}/test_drift.xml (100%) rename storm_analysis/{tests => test/data}/test_drift_mlist.bin (100%) rename storm_analysis/{tests => test/data}/test_l1h.dax (100%) rename storm_analysis/{tests => test/data}/test_l1h.inf (100%) rename storm_analysis/{tests => test/data}/test_l1h.xml (100%) rename storm_analysis/{tests => test/data}/test_l1h_olist.bin (100%) rename storm_analysis/{tests => test/data}/test_low_snr.dax (100%) rename storm_analysis/{tests => test/data}/test_low_snr.inf (100%) rename storm_analysis/{tests => test/data}/test_low_snr_list.bin (100%) rename storm_analysis/{tests => test/data}/test_olist.bin (100%) rename storm_analysis/{tests => test/data}/test_sc_2d.xml (100%) rename storm_analysis/{tests => test/data}/test_sc_2d_fixed.xml (100%) rename storm_analysis/{tests => test/data}/test_sc_3d.xml (100%) rename storm_analysis/{tests => test/data}/test_sc_Z.xml (100%) rename storm_analysis/{tests => test/data}/test_spliner.dax (100%) rename storm_analysis/{tests => 
test/data}/test_spliner.inf (100%) rename storm_analysis/{tests => test/data}/test_spliner_dh.xml (100%) rename storm_analysis/{tests => test/data}/test_spliner_dh_fista.xml (100%) rename storm_analysis/{tests => test/data}/test_spliner_olist.bin (100%) rename storm_analysis/{tests => test}/run_tests.py (100%) rename storm_analysis/{tests => test}/test_c_extensions_import.py (100%) diff --git a/MANIFEST.in b/MANIFEST.in new file mode 100644 index 00000000..9cd28b59 --- /dev/null +++ b/MANIFEST.in @@ -0,0 +1,12 @@ +recursive-include storm_analysis *.py +recursive-include storm_analysis *.ui +recursive-include storm_analysis *.txt +recursive-include storm_analysis *.md +recursive-include storm_analysis *.m +recursive-include storm_analysis *.mat +recursive-include storm_analysis *.c +recursive-include storm_analysis *.h +recursive-include storm_analysis */data/* +recursive-include storm_analysis */sample_data/* +recursive-include storm_analysis *tests* + diff --git a/setup.py b/setup.py index 266690b3..b7438188 100644 --- a/setup.py +++ b/setup.py @@ -116,16 +116,14 @@ def get_c_extensions(): ext_modules=get_c_extensions(), package_data={ - #'sample': ['package_data.dat'], - # README and LICENSE '': ['*.txt', '*.md'], - # Matlab files - 'matlab': ['*.m', '*.mat'], - 'c_files': ['*.c', '*.h'], + + '': ['*.m', '*.mat'], + '': ['*.c', '*.h'], + '': ['*tests*'], + '': ['*data*'], }, exclude_package_data={ - 'scripts': ['*.sh', '*.bat'], - 'executables': ['*.exe'], }, include_package_data=True, diff --git a/storm_analysis/tests/LICENSE.txt b/storm_analysis/test/data/LICENSE.txt similarity index 100% rename from storm_analysis/tests/LICENSE.txt rename to storm_analysis/test/data/LICENSE.txt diff --git a/storm_analysis/tests/README.txt b/storm_analysis/test/data/README.txt similarity index 100% rename from storm_analysis/tests/README.txt rename to storm_analysis/test/data/README.txt diff --git a/storm_analysis/tests/calib.npy b/storm_analysis/test/data/calib.npy similarity 
index 100% rename from storm_analysis/tests/calib.npy rename to storm_analysis/test/data/calib.npy diff --git a/storm_analysis/tests/results.txt b/storm_analysis/test/data/results.txt similarity index 100% rename from storm_analysis/tests/results.txt rename to storm_analysis/test/data/results.txt diff --git a/storm_analysis/tests/test.dax b/storm_analysis/test/data/test.dax similarity index 100% rename from storm_analysis/tests/test.dax rename to storm_analysis/test/data/test.dax diff --git a/storm_analysis/tests/test.inf b/storm_analysis/test/data/test.inf similarity index 100% rename from storm_analysis/tests/test.inf rename to storm_analysis/test/data/test.inf diff --git a/storm_analysis/tests/test_3d_2d.xml b/storm_analysis/test/data/test_3d_2d.xml similarity index 100% rename from storm_analysis/tests/test_3d_2d.xml rename to storm_analysis/test/data/test_3d_2d.xml diff --git a/storm_analysis/tests/test_3d_2d_fixed.xml b/storm_analysis/test/data/test_3d_2d_fixed.xml similarity index 100% rename from storm_analysis/tests/test_3d_2d_fixed.xml rename to storm_analysis/test/data/test_3d_2d_fixed.xml diff --git a/storm_analysis/tests/test_3d_2d_fixed_low_snr.xml b/storm_analysis/test/data/test_3d_2d_fixed_low_snr.xml similarity index 100% rename from storm_analysis/tests/test_3d_2d_fixed_low_snr.xml rename to storm_analysis/test/data/test_3d_2d_fixed_low_snr.xml diff --git a/storm_analysis/tests/test_3d_3d.xml b/storm_analysis/test/data/test_3d_3d.xml similarity index 100% rename from storm_analysis/tests/test_3d_3d.xml rename to storm_analysis/test/data/test_3d_3d.xml diff --git a/storm_analysis/tests/test_3d_Z.xml b/storm_analysis/test/data/test_3d_Z.xml similarity index 100% rename from storm_analysis/tests/test_3d_Z.xml rename to storm_analysis/test/data/test_3d_Z.xml diff --git a/storm_analysis/tests/test_bg_sub.dax b/storm_analysis/test/data/test_bg_sub.dax similarity index 100% rename from storm_analysis/tests/test_bg_sub.dax rename to 
storm_analysis/test/data/test_bg_sub.dax diff --git a/storm_analysis/tests/test_bg_sub.inf b/storm_analysis/test/data/test_bg_sub.inf similarity index 100% rename from storm_analysis/tests/test_bg_sub.inf rename to storm_analysis/test/data/test_bg_sub.inf diff --git a/storm_analysis/tests/test_drift.xml b/storm_analysis/test/data/test_drift.xml similarity index 100% rename from storm_analysis/tests/test_drift.xml rename to storm_analysis/test/data/test_drift.xml diff --git a/storm_analysis/tests/test_drift_mlist.bin b/storm_analysis/test/data/test_drift_mlist.bin similarity index 100% rename from storm_analysis/tests/test_drift_mlist.bin rename to storm_analysis/test/data/test_drift_mlist.bin diff --git a/storm_analysis/tests/test_l1h.dax b/storm_analysis/test/data/test_l1h.dax similarity index 100% rename from storm_analysis/tests/test_l1h.dax rename to storm_analysis/test/data/test_l1h.dax diff --git a/storm_analysis/tests/test_l1h.inf b/storm_analysis/test/data/test_l1h.inf similarity index 100% rename from storm_analysis/tests/test_l1h.inf rename to storm_analysis/test/data/test_l1h.inf diff --git a/storm_analysis/tests/test_l1h.xml b/storm_analysis/test/data/test_l1h.xml similarity index 100% rename from storm_analysis/tests/test_l1h.xml rename to storm_analysis/test/data/test_l1h.xml diff --git a/storm_analysis/tests/test_l1h_olist.bin b/storm_analysis/test/data/test_l1h_olist.bin similarity index 100% rename from storm_analysis/tests/test_l1h_olist.bin rename to storm_analysis/test/data/test_l1h_olist.bin diff --git a/storm_analysis/tests/test_low_snr.dax b/storm_analysis/test/data/test_low_snr.dax similarity index 100% rename from storm_analysis/tests/test_low_snr.dax rename to storm_analysis/test/data/test_low_snr.dax diff --git a/storm_analysis/tests/test_low_snr.inf b/storm_analysis/test/data/test_low_snr.inf similarity index 100% rename from storm_analysis/tests/test_low_snr.inf rename to storm_analysis/test/data/test_low_snr.inf diff --git 
a/storm_analysis/tests/test_low_snr_list.bin b/storm_analysis/test/data/test_low_snr_list.bin similarity index 100% rename from storm_analysis/tests/test_low_snr_list.bin rename to storm_analysis/test/data/test_low_snr_list.bin diff --git a/storm_analysis/tests/test_olist.bin b/storm_analysis/test/data/test_olist.bin similarity index 100% rename from storm_analysis/tests/test_olist.bin rename to storm_analysis/test/data/test_olist.bin diff --git a/storm_analysis/tests/test_sc_2d.xml b/storm_analysis/test/data/test_sc_2d.xml similarity index 100% rename from storm_analysis/tests/test_sc_2d.xml rename to storm_analysis/test/data/test_sc_2d.xml diff --git a/storm_analysis/tests/test_sc_2d_fixed.xml b/storm_analysis/test/data/test_sc_2d_fixed.xml similarity index 100% rename from storm_analysis/tests/test_sc_2d_fixed.xml rename to storm_analysis/test/data/test_sc_2d_fixed.xml diff --git a/storm_analysis/tests/test_sc_3d.xml b/storm_analysis/test/data/test_sc_3d.xml similarity index 100% rename from storm_analysis/tests/test_sc_3d.xml rename to storm_analysis/test/data/test_sc_3d.xml diff --git a/storm_analysis/tests/test_sc_Z.xml b/storm_analysis/test/data/test_sc_Z.xml similarity index 100% rename from storm_analysis/tests/test_sc_Z.xml rename to storm_analysis/test/data/test_sc_Z.xml diff --git a/storm_analysis/tests/test_spliner.dax b/storm_analysis/test/data/test_spliner.dax similarity index 100% rename from storm_analysis/tests/test_spliner.dax rename to storm_analysis/test/data/test_spliner.dax diff --git a/storm_analysis/tests/test_spliner.inf b/storm_analysis/test/data/test_spliner.inf similarity index 100% rename from storm_analysis/tests/test_spliner.inf rename to storm_analysis/test/data/test_spliner.inf diff --git a/storm_analysis/tests/test_spliner_dh.xml b/storm_analysis/test/data/test_spliner_dh.xml similarity index 100% rename from storm_analysis/tests/test_spliner_dh.xml rename to storm_analysis/test/data/test_spliner_dh.xml diff --git 
a/storm_analysis/tests/test_spliner_dh_fista.xml b/storm_analysis/test/data/test_spliner_dh_fista.xml similarity index 100% rename from storm_analysis/tests/test_spliner_dh_fista.xml rename to storm_analysis/test/data/test_spliner_dh_fista.xml diff --git a/storm_analysis/tests/test_spliner_olist.bin b/storm_analysis/test/data/test_spliner_olist.bin similarity index 100% rename from storm_analysis/tests/test_spliner_olist.bin rename to storm_analysis/test/data/test_spliner_olist.bin diff --git a/storm_analysis/tests/run_tests.py b/storm_analysis/test/run_tests.py similarity index 100% rename from storm_analysis/tests/run_tests.py rename to storm_analysis/test/run_tests.py diff --git a/storm_analysis/tests/test_c_extensions_import.py b/storm_analysis/test/test_c_extensions_import.py similarity index 100% rename from storm_analysis/tests/test_c_extensions_import.py rename to storm_analysis/test/test_c_extensions_import.py From cd150219060679b7f58612e4b53933701d51a2e7 Mon Sep 17 00:00:00 2001 From: Hadrien Mary Date: Fri, 28 Oct 2016 09:29:22 -0400 Subject: [PATCH 16/17] Tweak setup.py --- setup.py | 12 ++---------- 1 file changed, 2 insertions(+), 10 deletions(-) diff --git a/setup.py b/setup.py index b7438188..2a7fbb72 100644 --- a/setup.py +++ b/setup.py @@ -115,16 +115,8 @@ def get_c_extensions(): packages=find_packages(), ext_modules=get_c_extensions(), - package_data={ - '': ['*.txt', '*.md'], - - '': ['*.m', '*.mat'], - '': ['*.c', '*.h'], - '': ['*tests*'], - '': ['*data*'], - }, - exclude_package_data={ - }, + package_data={}, + exclude_package_data={}, include_package_data=True, requires=[], From 50d480192a2171c55da788ba24e4d29431cc1cd6 Mon Sep 17 00:00:00 2001 From: Hadrien Mary Date: Fri, 28 Oct 2016 10:20:11 -0400 Subject: [PATCH 17/17] More tests --- .gitignore | 1 + MANIFEST.in | 2 + storm_analysis/__init__.py | 23 ++ storm_analysis/frc/frc_calc2d.py | 168 +++++---- storm_analysis/rcc/rcc-drift-correction.py | 329 ----------------- 
storm_analysis/rcc/rcc_drift_correction.py | 338 ++++++++++++++++++ storm_analysis/test/run_tests.py | 17 +- .../test/test_c_extensions_import.py | 2 +- storm_analysis/test/test_frc.py | 14 + storm_analysis/test/test_rcc.py | 14 + 10 files changed, 496 insertions(+), 412 deletions(-) delete mode 100644 storm_analysis/rcc/rcc-drift-correction.py create mode 100644 storm_analysis/rcc/rcc_drift_correction.py create mode 100644 storm_analysis/test/test_frc.py create mode 100644 storm_analysis/test/test_rcc.py diff --git a/.gitignore b/.gitignore index 6887c59b..2fc01b86 100644 --- a/.gitignore +++ b/.gitignore @@ -8,6 +8,7 @@ build/ *.eggs dist/ .cache/ +storm_analysis/test/output/ ############# ## Emacs diff --git a/MANIFEST.in b/MANIFEST.in index 9cd28b59..da9d6761 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -10,3 +10,5 @@ recursive-include storm_analysis */data/* recursive-include storm_analysis */sample_data/* recursive-include storm_analysis *tests* +prune storm_analysis/test/output + diff --git a/storm_analysis/__init__.py b/storm_analysis/__init__.py index 013e4b7e..1c4152d6 100644 --- a/storm_analysis/__init__.py +++ b/storm_analysis/__init__.py @@ -1 +1,24 @@ #!/usr/bin/python +import os + + +def get_data(data_path): + import pkg_resources + data = pkg_resources.resource_filename(__name__, data_path) + return data + + +def get_path(path): + return os.path.join(os.path.dirname(os.path.abspath(__file__)), path) + + +def get_path_output_test(fname=None): + out_path = get_path("test/output/") + + if not os.path.exists(out_path): + os.makedirs(out_path) + + if fname: + return os.path.join(out_path, fname) + else: + return out_path \ No newline at end of file diff --git a/storm_analysis/frc/frc_calc2d.py b/storm_analysis/frc/frc_calc2d.py index 3a892af2..b2c6116d 100644 --- a/storm_analysis/frc/frc_calc2d.py +++ b/storm_analysis/frc/frc_calc2d.py @@ -12,91 +12,103 @@ import matplotlib.pyplot as pyplot import numpy import sys +import argparse import 
storm_analysis.frc.frc_c as frcC import storm_analysis.sa_library.arraytoimage as arraytoimage import storm_analysis.sa_library.i3togrid as i3togrid import storm_analysis.sa_library.readinsight3 as readinsight3 -pixel_size = 160.0 -storm_scale = 8 - -if (len(sys.argv) < 3): - print("usage: ") - exit() - -# Load the data. -i3_grid = i3togrid.I3GData(sys.argv[1], scale = storm_scale) - -# Split the data (approximately) in half & generate 2D histograms. -print("Searching for mid-point") - -# For simulations the .dax file might not actually have as many -# frames as the molecule list so use a hack to get the number of -# frames in the molecule list. -max_f = int(numpy.max(i3_grid.i3data['fr'])) + 1 -locs = round(numpy.sum(i3_grid.i3To2DGridAllChannelsMerged(fmax = max_f))) - -start = 0 -end = max_f -half_locs = locs/2 -while ((end - start) > 1): - mid = (end - start)/2 + start - print(" ", start, mid, end) - grid1 = i3_grid.i3To2DGridAllChannelsMerged(fmin = 0, fmax = mid) - if (numpy.sum(grid1) < half_locs): - start = mid - else: - end = mid - -print(" mid-point:", end) -grid1 = i3_grid.i3To2DGridAllChannelsMerged(fmin = 0, fmax = end) -grid2 = i3_grid.i3To2DGridAllChannelsMerged(fmin = end, fmax = max_f) - -# Compute FFT -print("Calculating") -grid1_fft = numpy.fft.fftshift(numpy.fft.fft2(grid1)) -grid2_fft = numpy.fft.fftshift(numpy.fft.fft2(grid2)) - -grid1_fft_sqr = grid1_fft * numpy.conj(grid1_fft) -grid2_fft_sqr = grid2_fft * numpy.conj(grid2_fft) -grid1_grid2 = grid1_fft * numpy.conj(grid2_fft) - -if 1: - arraytoimage.singleColorImage(numpy.abs(grid1_fft), "grid1") - arraytoimage.singleColorImage(numpy.abs(grid2_fft), "grid2") - -[frc, frc_counts] = frcC.frc(grid1_fft, grid2_fft) - -# Plot results -for i in range(frc.size): - if (frc_counts[i] > 0): - frc[i] = frc[i]/float(frc_counts[i]) - else: - frc[i] = 0.0 - -xvals = numpy.arange(frc.size) -xvals = xvals/(float(grid1_fft.shape[0]) * pixel_size * (1.0/float(storm_scale))) -frc = numpy.real(frc) - -fp = 
open(sys.argv[2], "w") -for i in range(xvals.size): - fp.write(str(xvals[i]) + "," + str(frc[i]) + "\n") -fp.close() - -if (len(sys.argv) == 4) and (sys.argv[3] == "0"): - exit() - -fig = pyplot.figure() -ax = fig.add_subplot(111) -ax.scatter(xvals, frc) -pyplot.xlim([xvals[0], xvals[-1]]) -pyplot.ylim([-0.2,1.2]) -pyplot.xlabel("Spatial Frequency (nm-1)") -pyplot.ylabel("Correlation") -pyplot.show() -# +def calc2d(in_list, results, show_plot): + + pixel_size = 160.0 + storm_scale = 8 + + # Load the data. + i3_grid = i3togrid.I3GData(in_list, scale = storm_scale) + + # Split the data (approximately) in half & generate 2D histograms. + print("Searching for mid-point") + + # For simulations the .dax file might not actually have as many + # frames as the molecule list so use a hack to get the number of + # frames in the molecule list. + max_f = int(numpy.max(i3_grid.i3data['fr'])) + 1 + locs = round(numpy.sum(i3_grid.i3To2DGridAllChannelsMerged(fmax = max_f))) + + start = 0 + end = max_f + half_locs = locs/2 + while ((end - start) > 1): + mid = (end - start)/2 + start + print(" ", start, mid, end) + grid1 = i3_grid.i3To2DGridAllChannelsMerged(fmin = 0, fmax = mid) + if (numpy.sum(grid1) < half_locs): + start = mid + else: + end = mid + + print(" mid-point:", end) + grid1 = i3_grid.i3To2DGridAllChannelsMerged(fmin = 0, fmax = end) + grid2 = i3_grid.i3To2DGridAllChannelsMerged(fmin = end, fmax = max_f) + + # Compute FFT + print("Calculating") + grid1_fft = numpy.fft.fftshift(numpy.fft.fft2(grid1)) + grid2_fft = numpy.fft.fftshift(numpy.fft.fft2(grid2)) + + grid1_fft_sqr = grid1_fft * numpy.conj(grid1_fft) + grid2_fft_sqr = grid2_fft * numpy.conj(grid2_fft) + grid1_grid2 = grid1_fft * numpy.conj(grid2_fft) + + if show_plot: + arraytoimage.singleColorImage(numpy.abs(grid1_fft), "grid1") + arraytoimage.singleColorImage(numpy.abs(grid2_fft), "grid2") + + [frc, frc_counts] = frcC.frc(grid1_fft, grid2_fft) + + # Plot results + for i in range(frc.size): + if (frc_counts[i] > 
0): + frc[i] = frc[i]/float(frc_counts[i]) + else: + frc[i] = 0.0 + + xvals = numpy.arange(frc.size) + xvals = xvals/(float(grid1_fft.shape[0]) * pixel_size * (1.0/float(storm_scale))) + frc = numpy.real(frc) + + fp = open(results, "w") + for i in range(xvals.size): + fp.write(str(xvals[i]) + "," + str(frc[i]) + "\n") + fp.close() + + if show_plot: + + fig = pyplot.figure() + ax = fig.add_subplot(111) + ax.scatter(xvals, frc) + pyplot.xlim([xvals[0], xvals[-1]]) + pyplot.ylim([-0.2,1.2]) + pyplot.xlabel("Spatial Frequency (nm-1)") + pyplot.ylabel("Correlation") + pyplot.show() + + +if __name__ == "__main__": + + + parser = argparse.ArgumentParser(description='Calculate 2D FRC following Nieuwenhuizen, Nature Methods, 2013') + + parser.add_argument('--in', dest='in_list', type=str, required=True) + parser.add_argument('--res', dest='results', type=str, required=True) + parser.add_argument('--plot', dest='show_plot', type=bool, required=False, default=False) + + args = parser.parse_args() + + calc2d(args.in_list, args.results, args.show_plot) + # The MIT License # # Copyright (c) 2014 Zhuang Lab, Harvard University diff --git a/storm_analysis/rcc/rcc-drift-correction.py b/storm_analysis/rcc/rcc-drift-correction.py deleted file mode 100644 index 9c3eac44..00000000 --- a/storm_analysis/rcc/rcc-drift-correction.py +++ /dev/null @@ -1,329 +0,0 @@ -#!/usr/bin/python -# -# A Python implementation of the drift algorithm described in this reference: -# -# "Localization events-based sample drift correction for localization microscopy with redundant cross-correlation algorithm", -# Wang et al. Optics Express, 30 June 2014, Vol. 22, No. 13, DOI:10.1364/OE.22.015982. -# -# This uses the above algorithm for XY correction, then falls back to old -# approach for the Z correction. 
-# -# Hazen 09/14 -# - -import numpy -import os -import pickle -import scipy.interpolate -import scipy.signal -import sys - -import storm_analysis.sa_library.arraytoimage as arraytoimage -import storm_analysis.sa_library.driftutilities as driftutilities -import storm_analysis.sa_library.i3togrid as i3togrid -import storm_analysis.sa_library.imagecorrelation as imagecorrelation - - -# Setup -if (len(sys.argv) < 5): - print("usage: ") - exit() - -step = int(sys.argv[3]) -scale = int(sys.argv[4]) -i3_data = i3togrid.I3GDataLL(sys.argv[1], scale = scale) -film_l = i3_data.getFilmLength() -max_err = 0.2 - -correct_z = True -if (len(sys.argv) > 5): - correct_z = False - -# Sub-routines. -def saveDriftData(fdx, fdy, fdz): - driftutilities.saveDriftData(sys.argv[2], fdx, fdy, fdz) - -def interpolateData(xvals, yvals): - return driftutilities.interpolateData(xvals, yvals, film_l) - -# Don't analyze films that are too short. -if (4 * step > film_l): - saveDriftData(numpy.zeros(film_l), - numpy.zeros(film_l), - numpy.zeros(film_l)) - exit() - -print("Performing XY correction.") - -# Compute offsets between all pairs of sub images. 
-endpost = film_l - step/2 -old_start1 = -1 -start1 = 0 -end1 = start1 + step -start2 = start1 -end2 = start2 + step -i = 0 -j = 0 -centers = [(end1 - start1)/2 + start1] -pairs = [] -while (start1 < endpost): - - if (start2 > endpost): - i += 1 - j = i - start1 += step - end1 = start1 + step - start2 = start1 - end2 = start2 + step - if (end1 > endpost): - end1 = film_l - if (end2 > endpost): - end2 = film_l - if (start1 < endpost): - centers.append((end1 - start1)/2 + start1) - - if (start1 > endpost): - continue - - if not (start1 == start2): - if (old_start1 != start1): - i3_data.loadDataInFrames(fmin = start1, fmax = end1-1) - sub1 = i3_data.i3To2DGridAllChannelsMerged(uncorrected = True) - old_start1 = start1 - - i3_data.loadDataInFrames(fmin = start2, fmax = end2-1) - sub2 = i3_data.i3To2DGridAllChannelsMerged(uncorrected = True) - - [corr, dx, dy, success] = imagecorrelation.xyOffset(sub1, - sub2, - scale) - - dx = dx/float(scale) - dy = dy/float(scale) - - print("offset between frame ranges ", start1, "-" , end1 , " and ", start2, "-", end2) - - if success: - print(" -> ", dx, dy, "good") - else: - print(" -> ", dx, dy, "bad") - print("") - - pairs.append([i, j, dx, dy, success]) - - j += 1 - start2 += step - end2 = start2 + step - if (end2 > endpost): - end2 = film_l - - -print("--") - -# -# For testing it is faster to not have to re-run the -# XY drift correction calculations. -# -#with open("test.dat", "w") as fp: -# pickle.dump([centers, pairs], fp) -# -#with open("test.dat") as fp: -# [centers, pairs] = pickle.load(fp) -# - -# Prepare rij_x, rij_y, A matrix. -rij_x = numpy.zeros(len(pairs), dtype = numpy.float32) -rij_y = numpy.zeros(len(pairs), dtype = numpy.float32) -A = numpy.zeros((len(pairs),len(centers)), dtype = numpy.float32) -for i, pair in enumerate(pairs): - rij_x[i] = pair[2] - rij_y[i] = pair[3] - A[i,pair[0]:pair[1]] = 1.0 - -# Calculate drift (pass1). -# dx and dy contain the optimal offset between sub image i and sub image i+1 in x/y. 
-pinv_A = numpy.linalg.pinv(A) -dx = numpy.dot(pinv_A, rij_x) -dy = numpy.dot(pinv_A, rij_y) - -# Calculate errors. -err_x = numpy.dot(A, dx) - rij_x -err_y = numpy.dot(A, dy) - rij_y - -err_d = numpy.sqrt(err_x * err_x + err_y * err_y) -arg_sort_err = numpy.argsort(err_d) - -# Print errors before. -if 0: - print("Before:") - for i in range(err_d.size): - print(i, rij_x[i], rij_y[i], A[i,:], err_d[i]) - print("") - -# Remove bad values. -j = len(arg_sort_err) - 1 - -while (j > 0) and (err_d[arg_sort_err[j]] > max_err): - index = arg_sort_err[j] - delA = numpy.delete(A, index, 0) - if (numpy.linalg.matrix_rank(delA, tol = 1.0) == (len(centers)-1)): - print(j, "removing", index, "with error", err_d[index]) - A = delA - rij_x = numpy.delete(rij_x, index, 0) - rij_y = numpy.delete(rij_y, index, 0) - err_d = numpy.delete(err_d, index, 0) - arg_sort_err[(arg_sort_err > index)] -= 1 - else: - print("not removing", index, "with error", err_d[index]) - j -= 1 - -# Print errors after. -if 0: - print("") - print("After:") - for i in range(err_d.size): - print(i, rij_x[i], rij_y[i], A[i,:], err_d[i]) - print("") - -# Calculate drift (pass2). -pinv_A = numpy.linalg.pinv(A) -dx = numpy.dot(pinv_A, rij_x) -dy = numpy.dot(pinv_A, rij_y) - - -# Integrate to get final drift. -driftx = numpy.zeros((dx.size)) -drifty = numpy.zeros((dy.size)) -for i in range(dx.size): - driftx[i] = numpy.sum(dx[0:i]) - drifty[i] = numpy.sum(dy[0:i]) - -if 1: - for i in range(driftx.size): - print(i, centers[i], driftx[i], drifty[i]) - -# Create spline for interpolation. -final_driftx = interpolateData(centers, driftx) -final_drifty = interpolateData(centers, drifty) - -# Plot XY drift. -if 0: - import matplotlib - import matplotlib.pyplot as pyplot - - x = numpy.arange(film_l) - fig = pyplot.figure() - ax = fig.add_subplot(111) - ax.plot(x, final_driftx, color = 'blue') - ax.plot(x, final_drifty, color = 'red') - pyplot.show() - -# Z correction. 
-if not correct_z: - saveDriftData(final_driftx, - final_drifty, - numpy.zeros(film_l)) - exit() - -print("") -print("Performing Z Correction.") - -start = 0 -z_bins = 20 -i3_data.loadDataInFrames(fmin = start, fmax = start+step) - -if correct_z: - z_bins = 20 - xyzmaster = i3_data.i3To3DGridAllChannelsMerged(z_bins, - uncorrected = True) - -j = 0 -index = 0 -old_dz = 0.0 -driftz = numpy.zeros((dx.size)) -while(j < film_l): - - # Load correct frame range. - if ((j + 2*step) >= film_l): - i3_data.loadDataInFrames(fmin = j) - step_step = 2*step - else: - i3_data.loadDataInFrames(fmin = j, fmax = j + step) - step_step = step - - # Apply XY drift correction. - i3_data.applyXYDriftCorrection(driftx[index], drifty[index]) - - # Z correlation - dz = old_dz - - xyzcurr = i3_data.i3To3DGridAllChannelsMerged(z_bins, - uncorrected = True) - - [corr, fit, dz, z_success] = imagecorrelation.zOffset(xyzmaster, xyzcurr) - - # Update Values - if z_success: - old_dz = dz - else: - dz = old_dz - - dz = dz * 1000.0/float(z_bins) - - if z_success: - i3_data.applyZDriftCorrection(-dz) - xyzmaster += i3_data.i3To3DGridAllChannelsMerged(z_bins) - - driftz[index] = dz - - if z_success: - print(index, dz, "good") - else: - print(index, dz, "bad") - - index += 1 - j += step_step - -final_driftz = interpolateData(centers, driftz) - -saveDriftData(final_driftx, - final_drifty, - final_driftz) - -# Plot X,Y, Z drift. -if False: - import matplotlib - import matplotlib.pyplot as pyplot - - pixel_size = 160.0 # pixel size in nm. 
- x = numpy.arange(film_l) - fig = pyplot.figure() - ax = fig.add_subplot(111) - ax.plot(x, pixel_size * final_driftx, color = 'red') - ax.plot(x, pixel_size * final_drifty, color = 'green') - ax.plot(x, final_driftz, color = 'blue') - pyplot.show() - -# -# The MIT License -# -# Copyright (c) 2014 Zhuang Lab, Harvard University -# -# Permission is hereby granted, free of charge, to any person obtaining a copy -# of this software and associated documentation files (the "Software"), to deal -# in the Software without restriction, including without limitation the rights -# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -# copies of the Software, and to permit persons to whom the Software is -# furnished to do so, subject to the following conditions: -# -# The above copyright notice and this permission notice shall be included in -# all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -# THE SOFTWARE. -# diff --git a/storm_analysis/rcc/rcc_drift_correction.py b/storm_analysis/rcc/rcc_drift_correction.py new file mode 100644 index 00000000..32b7c412 --- /dev/null +++ b/storm_analysis/rcc/rcc_drift_correction.py @@ -0,0 +1,338 @@ +#!/usr/bin/python +# +# A Python implementation of the drift algorithm described in this reference: +# +# "Localization events-based sample drift correction for localization microscopy with redundant cross-correlation algorithm", +# Wang et al. Optics Express, 30 June 2014, Vol. 22, No. 13, DOI:10.1364/OE.22.015982. 
#!/usr/bin/python
#
# A Python implementation of the drift algorithm described in this reference:
#
# "Localization events-based sample drift correction for localization microscopy
#  with redundant cross-correlation algorithm", Wang et al., Optics Express,
#  30 June 2014, Vol. 22, No. 13, DOI:10.1364/OE.22.015982.
#
# This uses the above algorithm for the XY correction, then falls back to the
# old approach for the Z correction.
#
# Hazen 09/14
#

import numpy
import os
import pickle
import scipy.interpolate
import scipy.signal
import sys
import argparse

import storm_analysis.sa_library.arraytoimage as arraytoimage
import storm_analysis.sa_library.driftutilities as driftutilities
import storm_analysis.sa_library.i3togrid as i3togrid
import storm_analysis.sa_library.imagecorrelation as imagecorrelation


def rcc(bin, drift, step, scale, correct_z=True, show_plot=False):
    """
    Perform redundant cross-correlation (RCC) drift correction.

    bin - Insight3 format localization (.bin) file to analyze.
    drift - name of the text file to save the computed drift to.
    step - number of frames per sub-image / correction step.
    scale - up-sampling factor for the correlation images.
    correct_z - also estimate Z drift (default True).
    show_plot - show a plot of the final XYZ drift (default False).

    The XY drift is computed from pairwise offsets between all sub-image
    pairs (the RCC algorithm); Z drift uses the older sequential approach.
    """
    # NOTE(review): the parameter name 'bin' shadows the builtin; kept for
    # backwards compatibility with existing callers.
    i3_data = i3togrid.I3GDataLL(bin, scale = scale)
    film_l = i3_data.getFilmLength()
    max_err = 0.2

    # Sub-routines.
    def saveDriftData(fdx, fdy, fdz):
        driftutilities.saveDriftData(drift, fdx, fdy, fdz)

    def interpolateData(xvals, yvals):
        return driftutilities.interpolateData(xvals, yvals, film_l)

    # Don't analyze films that are too short; save all-zero drift and return.
    # (Was exit(), which would kill the calling process - rcc() is a library
    # function now, so return instead.)
    if (4 * step > film_l):
        saveDriftData(numpy.zeros(film_l),
                      numpy.zeros(film_l),
                      numpy.zeros(film_l))
        return

    print("Performing XY correction.")

    # Compute offsets between all pairs of sub images.
    endpost = film_l - step/2
    old_start1 = -1
    start1 = 0
    end1 = start1 + step
    start2 = start1
    end2 = start2 + step
    i = 0
    j = 0
    centers = [(end1 - start1)/2 + start1]
    pairs = []
    while (start1 < endpost):

        if (start2 > endpost):
            i += 1
            j = i
            start1 += step
            end1 = start1 + step
            start2 = start1
            end2 = start2 + step
            if (end1 > endpost):
                end1 = film_l
            if (end2 > endpost):
                end2 = film_l
            if (start1 < endpost):
                centers.append((end1 - start1)/2 + start1)

        if (start1 > endpost):
            continue

        # Skip self-pairs; correlate sub-image [start1, end1) with [start2, end2).
        if not (start1 == start2):
            if (old_start1 != start1):
                i3_data.loadDataInFrames(fmin = start1, fmax = end1-1)
                sub1 = i3_data.i3To2DGridAllChannelsMerged(uncorrected = True)
                old_start1 = start1

            i3_data.loadDataInFrames(fmin = start2, fmax = end2-1)
            sub2 = i3_data.i3To2DGridAllChannelsMerged(uncorrected = True)

            [corr, dx, dy, success] = imagecorrelation.xyOffset(sub1,
                                                                sub2,
                                                                scale)

            # Convert offsets from up-sampled grid units back to pixels.
            dx = dx/float(scale)
            dy = dy/float(scale)

            print("offset between frame ranges ", start1, "-" , end1 , " and ", start2, "-", end2)

            if success:
                print(" -> ", dx, dy, "good")
            else:
                print(" -> ", dx, dy, "bad")
            print("")

            pairs.append([i, j, dx, dy, success])

        j += 1
        start2 += step
        end2 = start2 + step
        if (end2 > endpost):
            end2 = film_l


    print("--")

    #
    # For testing it is faster to not have to re-run the
    # XY drift correction calculations.
    #
    #with open("test.dat", "w") as fp:
    #    pickle.dump([centers, pairs], fp)
    #
    #with open("test.dat") as fp:
    #    [centers, pairs] = pickle.load(fp)
    #

    # Prepare rij_x, rij_y, A matrix.
    rij_x = numpy.zeros(len(pairs), dtype = numpy.float32)
    rij_y = numpy.zeros(len(pairs), dtype = numpy.float32)
    A = numpy.zeros((len(pairs),len(centers)), dtype = numpy.float32)
    for i, pair in enumerate(pairs):
        rij_x[i] = pair[2]
        rij_y[i] = pair[3]
        A[i,pair[0]:pair[1]] = 1.0

    # Calculate drift (pass1).
    # dx and dy contain the optimal offset between sub image i and sub image i+1 in x/y.
    pinv_A = numpy.linalg.pinv(A)
    dx = numpy.dot(pinv_A, rij_x)
    dy = numpy.dot(pinv_A, rij_y)

    # Calculate errors.
    err_x = numpy.dot(A, dx) - rij_x
    err_y = numpy.dot(A, dy) - rij_y

    err_d = numpy.sqrt(err_x * err_x + err_y * err_y)
    arg_sort_err = numpy.argsort(err_d)

    # Print errors before.
    if 0:
        print("Before:")
        for i in range(err_d.size):
            print(i, rij_x[i], rij_y[i], A[i,:], err_d[i])
        print("")

    # Remove bad values, worst error first, as long as removing the
    # measurement does not make the A matrix rank deficient.
    j = len(arg_sort_err) - 1

    while (j > 0) and (err_d[arg_sort_err[j]] > max_err):
        index = arg_sort_err[j]
        delA = numpy.delete(A, index, 0)
        if (numpy.linalg.matrix_rank(delA, tol = 1.0) == (len(centers)-1)):
            print(j, "removing", index, "with error", err_d[index])
            A = delA
            rij_x = numpy.delete(rij_x, index, 0)
            rij_y = numpy.delete(rij_y, index, 0)
            err_d = numpy.delete(err_d, index, 0)
            arg_sort_err[(arg_sort_err > index)] -= 1
        else:
            print("not removing", index, "with error", err_d[index])
        j -= 1

    # Print errors after.
    if 0:
        print("")
        print("After:")
        for i in range(err_d.size):
            print(i, rij_x[i], rij_y[i], A[i,:], err_d[i])
        print("")

    # Calculate drift (pass2).
    pinv_A = numpy.linalg.pinv(A)
    dx = numpy.dot(pinv_A, rij_x)
    dy = numpy.dot(pinv_A, rij_y)


    # Integrate to get final drift.
    driftx = numpy.zeros((dx.size))
    drifty = numpy.zeros((dy.size))
    for i in range(dx.size):
        driftx[i] = numpy.sum(dx[0:i])
        drifty[i] = numpy.sum(dy[0:i])

    if 1:
        for i in range(driftx.size):
            print(i, centers[i], driftx[i], drifty[i])

    # Create spline for interpolation.
    final_driftx = interpolateData(centers, driftx)
    final_drifty = interpolateData(centers, drifty)

    # Plot XY drift.
    if 0:
        import matplotlib
        import matplotlib.pyplot as pyplot

        x = numpy.arange(film_l)
        fig = pyplot.figure()
        ax = fig.add_subplot(111)
        ax.plot(x, final_driftx, color = 'blue')
        ax.plot(x, final_drifty, color = 'red')
        pyplot.show()

    # Z correction.
    # (Was exit(); return so the caller's process survives.)
    if not correct_z:
        saveDriftData(final_driftx,
                      final_drifty,
                      numpy.zeros(film_l))
        return

    print("")
    print("Performing Z Correction.")

    start = 0
    z_bins = 20
    i3_data.loadDataInFrames(fmin = start, fmax = start+step)

    if correct_z:
        z_bins = 20
        xyzmaster = i3_data.i3To3DGridAllChannelsMerged(z_bins,
                                                        uncorrected = True)

    j = 0
    index = 0
    old_dz = 0.0
    driftz = numpy.zeros((dx.size))
    while(j < film_l):

        # Load correct frame range.
        if ((j + 2*step) >= film_l):
            i3_data.loadDataInFrames(fmin = j)
            step_step = 2*step
        else:
            i3_data.loadDataInFrames(fmin = j, fmax = j + step)
            step_step = step

        # Apply XY drift correction.
        i3_data.applyXYDriftCorrection(driftx[index], drifty[index])

        # Z correlation
        dz = old_dz

        xyzcurr = i3_data.i3To3DGridAllChannelsMerged(z_bins,
                                                      uncorrected = True)

        [corr, fit, dz, z_success] = imagecorrelation.zOffset(xyzmaster, xyzcurr)

        # Update Values
        if z_success:
            old_dz = dz
        else:
            dz = old_dz

        dz = dz * 1000.0/float(z_bins)

        if z_success:
            i3_data.applyZDriftCorrection(-dz)
            xyzmaster += i3_data.i3To3DGridAllChannelsMerged(z_bins)

        driftz[index] = dz

        if z_success:
            print(index, dz, "good")
        else:
            print(index, dz, "bad")

        index += 1
        j += step_step

    final_driftz = interpolateData(centers, driftz)

    saveDriftData(final_driftx,
                  final_drifty,
                  final_driftz)

    # Plot X,Y, Z drift.
    if show_plot:
        import matplotlib
        import matplotlib.pyplot as pyplot

        pixel_size = 160.0 # pixel size in nm.
        x = numpy.arange(film_l)
        fig = pyplot.figure()
        ax = fig.add_subplot(111)
        ax.plot(x, pixel_size * final_driftx, color = 'red')
        ax.plot(x, pixel_size * final_drifty, color = 'green')
        ax.plot(x, final_driftz, color = 'blue')
        pyplot.show()


def _str_to_bool(value):
    # argparse's type=bool is a trap: bool("False") is True because any
    # non-empty string is truthy, so "--zcorrect False" would still enable
    # Z correction. Parse common boolean spellings explicitly instead.
    if value.lower() in ("1", "true", "yes", "y"):
        return True
    if value.lower() in ("0", "false", "no", "n"):
        return False
    raise argparse.ArgumentTypeError("expected a boolean value, got '" + value + "'")


if __name__ == "__main__":


    parser = argparse.ArgumentParser(description='A Python implementation of the drift algorithm')

    parser.add_argument('--bin', dest='bin', type=str, required=True)
    parser.add_argument('--drift', dest='drift', type=str, required=True)
    parser.add_argument('--step', dest='step', type=float, required=True)
    parser.add_argument('--scale', dest='scale', type=float, required=True)
    parser.add_argument('--zcorrect', dest='correct_z', type=_str_to_bool, required=False, default=True)
    parser.add_argument('--show_plot', dest='show_plot', type=_str_to_bool, required=False, default=False)

    args = parser.parse_args()

    rcc(args.bin, args.drift, args.step, args.scale, correct_z=args.correct_z, show_plot=args.show_plot)

#
# The MIT License
#
# Copyright (c) 2014 Zhuang Lab, Harvard University
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +# THE SOFTWARE. +# diff --git a/storm_analysis/test/run_tests.py b/storm_analysis/test/run_tests.py index bba2d763..ed324e46 100644 --- a/storm_analysis/test/run_tests.py +++ b/storm_analysis/test/run_tests.py @@ -21,25 +21,32 @@ [dao_exe, "test.dax", "test_3d_2d.bin", "test_3d_2d.xml"], [dao_exe, "test.dax", "test_3d_3d.bin", "test_3d_3d.xml"], [dao_exe, "test.dax", "test_3d_Z.bin", "test_3d_Z.xml"], - ["Testing FRC"], - ["../frc/frc_calc2d.py", "test_drift_mlist.bin", "test_drift_frc.txt", "0"], + + #["Testing FRC"], + #["../frc/frc_calc2d.py", "test_drift_mlist.bin", "test_drift_frc.txt", "0"], + ["Testing L1H"], ["../L1H/setup_A_matrix.py", "theoritical", "test_l1h", "1.0", "0"], ["../L1H/cs_analysis.py", "test_l1h.dax", "test_l1h.xml", "test_l1h.hres", "test_l1h_list.bin"], - ["Testing RCC"], - ["../rcc/rcc-drift-correction.py", "test_drift_mlist.bin", "test_drift.txt", "2000", "1"], + + #["Testing RCC"], + #["../rcc/rcc-drift-correction.py", "test_drift_mlist.bin", "test_drift.txt", "2000", "1"], + ["Testing Rolling Ball Background Subtraction"], ["../rolling_ball_bgr/rolling_ball.py", "test_bg_sub.dax", "test_bg_sub_rb.dax", "10", "1"], + ["Testing sCMOS"], [scmos_exe, "test.dax", "test_sc_2d_fixed.bin", "test_sc_2d_fixed.xml"], [scmos_exe, "test.dax", "test_sc_2d.bin", "test_sc_2d.xml"], [scmos_exe, "test.dax", "test_sc_3d.bin", "test_sc_3d.xml"], [scmos_exe, "test.dax", "test_sc_Z.bin", "test_sc_Z.xml"], + ["Testing Spliner"], ["../spliner/measure_psf.py", "test_spliner.dax", "none", "test_spliner_olist.bin", "test_spliner_psf.psf", "1"], ["../spliner/psf_to_spline.py", "test_spliner_psf.psf", "test_spliner_psf.spline", "10"], [spliner_exe, "test_spliner.dax", "test_spliner_slist.bin", 
"test_spliner_dh.xml"], [spliner_exe, "test_spliner.dax", "test_spliner_flist.bin", "test_spliner_dh_fista.xml"], + ["Testing Track/Average/Correct"], ["../sa_utilities/track_average_correct.py", "test_drift_mlist.bin", "test_drift_alist.bin", "test_drift.xml"], @@ -47,9 +54,11 @@ ["Testing DBSCAN Clustering"], ["../dbscan/dbscan_analysis.py", "test_drift_alist.bin", "0"], ["../dbscan/cluster_images.py", "test_drift_aclusters_list.bin", "DBSCAN Clustering", "50", "20", "test_drift_dbscan"], + ["Testing Voronoi Clustering"], ["../voronoi/voronoi_analysis.py", "test_drift_alist.bin", "1.25", "./"], ["../dbscan/cluster_images.py", "test_drift_asrt_list.bin", "Voronoi Clustering", "50", "20", "test_drift_vr"], + ["Testing Wavelet Background Subtraction"], ["../wavelet_bgr/wavelet_bgr.py", "test_bg_sub.dax", "test_bg_sub_wbgr.dax", "db4", "2", "2", "10"] ] diff --git a/storm_analysis/test/test_c_extensions_import.py b/storm_analysis/test/test_c_extensions_import.py index 7052d8fb..4f513525 100644 --- a/storm_analysis/test/test_c_extensions_import.py +++ b/storm_analysis/test/test_c_extensions_import.py @@ -3,7 +3,7 @@ import platform def test_c_extensions_import(): - # Try to import all C extension + "Try to import all C extension" import storm_analysis.sa_library.ia_utilities_c import storm_analysis.sa_library.multi_fit_c diff --git a/storm_analysis/test/test_frc.py b/storm_analysis/test/test_frc.py new file mode 100644 index 00000000..f6074aca --- /dev/null +++ b/storm_analysis/test/test_frc.py @@ -0,0 +1,14 @@ +import storm_analysis + + +def test_frc(): + + in_list = storm_analysis.get_data("test/data/test_drift_mlist.bin") + results = storm_analysis.get_path_output_test("test_drift_frc.txt") + + from storm_analysis.frc.frc_calc2d import calc2d + calc2d(in_list, results, False) + + +if __name__ == "__main__": + test_frc() \ No newline at end of file diff --git a/storm_analysis/test/test_rcc.py b/storm_analysis/test/test_rcc.py new file mode 100644 index 
import storm_analysis


def test_rcc():
    """Run RCC drift correction on the test localization data set."""
    # Renamed from 'bin' to avoid shadowing the builtin.
    mlist = storm_analysis.get_data("test/data/test_drift_mlist.bin")
    drift = storm_analysis.get_path_output_test("test_drift.txt")

    from storm_analysis.rcc.rcc_drift_correction import rcc
    rcc(mlist, drift, 2000, 1)


if __name__ == "__main__":
    test_rcc()