commit 0737f1e615f8975fb4b2861b8944f0a057741fa1 Author: ismagom Date: Tue Jan 28 11:41:17 2014 +0000 Initial commit diff --git a/CMakeLists.txt b/CMakeLists.txt new file mode 100644 index 000000000..c677c9f9e --- /dev/null +++ b/CMakeLists.txt @@ -0,0 +1,42 @@ +if(${CMAKE_SOURCE_DIR} STREQUAL ${CMAKE_BINARY_DIR}) + message(FATAL_ERROR "Prevented in-tree build. This is bad practice.") +endif(${CMAKE_SOURCE_DIR} STREQUAL ${CMAKE_BINARY_DIR}) + +cmake_minimum_required (VERSION 2.6) +project (osldlib) + + +# The version number. +set (OSLDLIB_VERSION_MAJOR 0) +set (OSLDLIB_VERSION_MINOR 0) + + +set(CPACK_PACKAGE_VERSION_MAJOR ${OSLDLIB_VERSION_MAJOR}) +set(CPACK_PACKAGE_VERSION_MINOR ${OSLDLIB_VERSION_MINOR}) +set(CPACK_PACKAGE_VERSION_PATCH "1") +set(CPACK_SOURCE_GENERATOR "TBZ2") +set(CPACK_SOURCE_PACKAGE_FILE_NAME + "${CMAKE_PROJECT_NAME}-${CPACK_PACKAGE_VERSION_MAJOR}.${CPACK_PACKAGE_VERSION_MINOR}.${CPACK_PACKAGE_VERSION_PATCH}") +set(CPACK_SOURCE_IGNORE_FILES + "${CMAKE_CURRENT_BINARY_DIR};/.bzr/;~$;${CPACK_SOURCE_IGNORE_FILES}") +include(CPack) +add_custom_target(dist COMMAND ${CMAKE_MAKE_PROGRAM} package_source) + +option(DEBUG "Compiles with debugging symbols and no optimizations" OFF) + +if(DEBUG) + message("-- Configuring debugging CFLAGS") + set(CFDEB "-O0 -g -rdynamic") +else() + set(CFDEB "-O2") +endif() + +set(CMAKE_C_FLAGS "${CFDEB} -Wall -Wno-format-extra-args -Winline -Wno-unused-result -Wno-format -std=c99 -D_GNU_SOURCE") +set(CMAKE_BINARY_OUTPUT_DIRECTORY ${CMAKE_BINARY_DIR}/bin) + +### INCLUDES +include_directories("{CMAKE_CURRENT_SOURCE_DIR}/include") + +add_subdirectory(examples) +add_subdirectory(lib) + diff --git a/LICENSE b/LICENSE new file mode 100644 index 000000000..6600f1c98 --- /dev/null +++ b/LICENSE @@ -0,0 +1,165 @@ +GNU LESSER GENERAL PUBLIC LICENSE + Version 3, 29 June 2007 + + Copyright (C) 2007 Free Software Foundation, Inc. + Everyone is permitted to copy and distribute verbatim copies + of this license document, but changing it is not allowed. + + + This version of the GNU Lesser General Public License incorporates +the terms and conditions of version 3 of the GNU General Public +License, supplemented by the additional permissions listed below. + + 0. Additional Definitions. + + As used herein, "this License" refers to version 3 of the GNU Lesser +General Public License, and the "GNU GPL" refers to version 3 of the GNU +General Public License. + + "The Library" refers to a covered work governed by this License, +other than an Application or a Combined Work as defined below. + + An "Application" is any work that makes use of an interface provided +by the Library, but which is not otherwise based on the Library. +Defining a subclass of a class defined by the Library is deemed a mode +of using an interface provided by the Library. + + A "Combined Work" is a work produced by combining or linking an +Application with the Library. The particular version of the Library +with which the Combined Work was made is also called the "Linked +Version". + + The "Minimal Corresponding Source" for a Combined Work means the +Corresponding Source for the Combined Work, excluding any source code +for portions of the Combined Work that, considered in isolation, are +based on the Application, and not on the Linked Version. 
+ + The "Corresponding Application Code" for a Combined Work means the +object code and/or source code for the Application, including any data +and utility programs needed for reproducing the Combined Work from the +Application, but excluding the System Libraries of the Combined Work. + + 1. Exception to Section 3 of the GNU GPL. + + You may convey a covered work under sections 3 and 4 of this License +without being bound by section 3 of the GNU GPL. + + 2. Conveying Modified Versions. + + If you modify a copy of the Library, and, in your modifications, a +facility refers to a function or data to be supplied by an Application +that uses the facility (other than as an argument passed when the +facility is invoked), then you may convey a copy of the modified +version: + + a) under this License, provided that you make a good faith effort to + ensure that, in the event an Application does not supply the + function or data, the facility still operates, and performs + whatever part of its purpose remains meaningful, or + + b) under the GNU GPL, with none of the additional permissions of + this License applicable to that copy. + + 3. Object Code Incorporating Material from Library Header Files. + + The object code form of an Application may incorporate material from +a header file that is part of the Library. You may convey such object +code under terms of your choice, provided that, if the incorporated +material is not limited to numerical parameters, data structure +layouts and accessors, or small macros, inline functions and templates +(ten or fewer lines in length), you do both of the following: + + a) Give prominent notice with each copy of the object code that the + Library is used in it and that the Library and its use are + covered by this License. + + b) Accompany the object code with a copy of the GNU GPL and this license + document. + + 4. Combined Works. + + You may convey a Combined Work under terms of your choice that, +taken together, effectively do not restrict modification of the +portions of the Library contained in the Combined Work and reverse +engineering for debugging such modifications, if you also do each of +the following: + + a) Give prominent notice with each copy of the Combined Work that + the Library is used in it and that the Library and its use are + covered by this License. + + b) Accompany the Combined Work with a copy of the GNU GPL and this license + document. + + c) For a Combined Work that displays copyright notices during + execution, include the copyright notice for the Library among + these notices, as well as a reference directing the user to the + copies of the GNU GPL and this license document. + + d) Do one of the following: + + 0) Convey the Minimal Corresponding Source under the terms of this + License, and the Corresponding Application Code in a form + suitable for, and under terms that permit, the user to + recombine or relink the Application with a modified version of + the Linked Version to produce a modified Combined Work, in the + manner specified by section 6 of the GNU GPL for conveying + Corresponding Source. + + 1) Use a suitable shared library mechanism for linking with the + Library. A suitable mechanism is one that (a) uses at run time + a copy of the Library already present on the user's computer + system, and (b) will operate properly with a modified version + of the Library that is interface-compatible with the Linked + Version. 
+ + e) Provide Installation Information, but only if you would otherwise + be required to provide such information under section 6 of the + GNU GPL, and only to the extent that such information is + necessary to install and execute a modified version of the + Combined Work produced by recombining or relinking the + Application with a modified version of the Linked Version. (If + you use option 4d0, the Installation Information must accompany + the Minimal Corresponding Source and Corresponding Application + Code. If you use option 4d1, you must provide the Installation + Information in the manner specified by section 6 of the GNU GPL + for conveying Corresponding Source.) + + 5. Combined Libraries. + + You may place library facilities that are a work based on the +Library side by side in a single library together with other library +facilities that are not Applications and are not covered by this +License, and convey such a combined library under terms of your +choice, if you do both of the following: + + a) Accompany the combined library with a copy of the same work based + on the Library, uncombined with any other library facilities, + conveyed under the terms of this License. + + b) Give prominent notice with the combined library that part of it + is a work based on the Library, and explaining where to find the + accompanying uncombined form of the same work. + + 6. Revised Versions of the GNU Lesser General Public License. + + The Free Software Foundation may publish revised and/or new versions +of the GNU Lesser General Public License from time to time. Such new +versions will be similar in spirit to the present version, but may +differ in detail to address new problems or concerns. + + Each version is given a distinguishing version number. If the +Library as you received it specifies that a certain numbered version +of the GNU Lesser General Public License "or any later version" +applies to it, you have the option of following the terms and +conditions either of that published version or of any later version +published by the Free Software Foundation. If the Library as you +received it does not specify a version number of the GNU Lesser +General Public License, you may choose any version of the GNU Lesser +General Public License ever published by the Free Software Foundation. + + If the Library as you received it specifies that a proxy can decide +whether future versions of the GNU Lesser General Public License shall +apply, that proxy's public statement of acceptance of any version is +permanent authorization for you to choose that version for the +Library. diff --git a/README.md b/README.md new file mode 100644 index 000000000..f8f18e34d --- /dev/null +++ b/README.md @@ -0,0 +1,24 @@ +osld-lib +======== + +OSLD-lib is a free and open-source LTE library for SDR UE and eNodeB. The library does not rely on any external dependencies or frameworks. + + +The project contains a set of Python tools for the automatic code generation of modules for popular SDR frameworks, including GNURadio, ALOE++, IRIS, and OSSIE. These tools are easy to use and adapt for generating targets for specific platforms or frameworks. + +The DSP modules are based on OSLD (https://github.com/flexnets/aloe). + +## Examples + +Currently, only PSS/SSS decoding is available: + + +' +mkdir build +cd build +cmake ../ +make +examples/synch_test -i ../lte_signal.txt -N 0 +' + +Searches for an N_id_2 PSS/SSS signal in the provided file. 
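For reference, a minimal sketch of how a compatible lte_signal.txt could be produced with the library's own I/O API (filesink_init/filesink_write/filesink_close and the COMPLEX_FLOAT text format, declared in include/io/filesink.h and include/io/format.h later in this commit). The sample content below is an arbitrary complex tone, not a real LTE capture, so synch_test would not be expected to detect a PSS in it; it only illustrates the file format that the example reads by default (use -b for binary input instead).

#include <stdlib.h>
#include <math.h>
#include <complex.h>

#include "lte.h"

int main() {
	filesink_t fsink;
	int frame_length = 9600;                 /* 5 ms at 1.92 Msps, i.e. 10 slots of 960 samples */
	int i;
	cf_t *buffer = malloc(frame_length * sizeof(cf_t));

	if (!buffer) {
		perror("malloc");
		exit(-1);
	}
	if (filesink_init(&fsink, "lte_signal.txt", COMPLEX_FLOAT)) {
		exit(-1);
	}
	for (i = 0; i < frame_length; i++) {
		/* placeholder content: a complex exponential */
		buffer[i] = cexpf(_Complex_I * 2 * M_PI * 0.01 * i);
	}
	filesink_write(&fsink, buffer, frame_length);
	filesink_close(&fsink);
	free(buffer);
	return 0;
}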
diff --git a/cmake/FindFFTWS.cmake b/cmake/FindFFTWS.cmake new file mode 100644 index 000000000..ff28c85a7 --- /dev/null +++ b/cmake/FindFFTWS.cmake @@ -0,0 +1,22 @@ +# - Find FFTW +# Find the native FFTW includes and library +# +# FFTW_INCLUDES - where to find fftw3.h +# FFTW_LIBRARIES - List of libraries when using FFTW. +# FFTW_FOUND - True if FFTW found. + +if (FFTWS_INCLUDES) + # Already in cache, be silent + set (FFTWS_FIND_QUIETLY TRUE) +endif (FFTWS_INCLUDES) + +find_path (FFTWS_INCLUDES fftw3.h) +SET(CMAKE_FIND_LIBRARY_SUFFIXES .a) +find_library (FFTWfS_LIBRARIES NAMES fftw3f) +find_library (FFTWnS_LIBRARIES NAMES fftw3) +set(FFTWS_LIBRARIES ${FFTWfS_LIBRARIES} ${FFTWnS_LIBRARIES}) + +include (FindPackageHandleStandardArgs) +find_package_handle_standard_args (FFTWS DEFAULT_MSG FFTWS_LIBRARIES FFTWS_INCLUDES) + +mark_as_advanced (FFTWS_LIBRARIES FFTWS_INCLUDES) diff --git a/cmake/FindUHD.cmake b/cmake/FindUHD.cmake new file mode 100644 index 000000000..4f989718f --- /dev/null +++ b/cmake/FindUHD.cmake @@ -0,0 +1,26 @@ +INCLUDE(FindPkgConfig) +PKG_CHECK_MODULES(UHD uhd) +IF(NOT UHD_FOUND) + +FIND_PATH( + UHD_INCLUDE_DIRS + NAMES uhd/config.hpp + HINTS $ENV{UHD_DIR}/include + PATHS /usr/local/include + /usr/include +) + +FIND_LIBRARY( + UHD_LIBRARIES + NAMES uhd + HINTS $ENV{UHD_DIR}/lib + PATHS /usr/local/lib + /usr/lib + /usr/local/lib64 + /usr/local/lib32 +) + +INCLUDE(FindPackageHandleStandardArgs) +FIND_PACKAGE_HANDLE_STANDARD_ARGS(UHD DEFAULT_MSG UHD_LIBRARIES UHD_INCLUDE_DIRS) + +ENDIF(NOT UHD_FOUND) diff --git a/examples/CMakeLists.txt b/examples/CMakeLists.txt new file mode 100644 index 000000000..70b0d7c29 --- /dev/null +++ b/examples/CMakeLists.txt @@ -0,0 +1,49 @@ +#include fftw3 directories + +set(CMAKE_MODULE_PATH ${CMAKE_MODULE_PATH} "${CMAKE_CURRENT_SOURCE_DIR}/../cmake/") + +find_package(FFTWS REQUIRED) +include_directories(${FFTWS_INCLUDE_DIRS}) + +find_package(UHD) + + +set(LIBRARIES osld m ${FFTWS_LIBRARIES}) + + +include_directories(${CMAKE_CURRENT_SOURCE_DIR}/../include) + +add_executable(hl_example hl_example.c) +target_link_libraries(hl_example ${LIBRARIES}) + +add_executable(ll_example ll_example.c) +target_link_libraries(ll_example ${LIBRARIES}) + +add_executable(synch_test synch_test.c) +target_link_libraries(synch_test ${LIBRARIES}) + +add_executable(equalizer_test equalizer_test.c) +target_link_libraries(equalizer_test ${LIBRARIES}) + +add_executable(viterbi_test viterbi_test.c) +target_link_libraries(viterbi_test ${LIBRARIES}) + +add_executable(bch_test bch_test.c) +target_link_libraries(bch_test ${LIBRARIES}) + +add_executable(cell_search cell_search.c) +target_link_libraries(cell_search ${LIBRARIES}) + + +include_directories(${UHD_INCLUDE_DIRS} ${CMAKE_CURRENT_SOURCE_DIR}/../uhd) +add_executable(rssi_scan_usrp rssi_scan_usrp.c ../uhd/uhd_imp.cpp ../uhd/uhd_utils.c) +target_link_libraries(rssi_scan_usrp ${LIBRARIES} ${UHD_LIBRARIES}) + + +include_directories(${UHD_INCLUDE_DIRS} ${CMAKE_CURRENT_SOURCE_DIR}/../uhd) +add_executable(pss_scan_usrp pss_scan_usrp.c ../uhd/uhd_imp.cpp ../uhd/uhd_utils.c) +target_link_libraries(pss_scan_usrp ${LIBRARIES} ${UHD_LIBRARIES}) + + + + diff --git a/examples/bch_test.c b/examples/bch_test.c new file mode 100644 index 000000000..84bb6fdcb --- /dev/null +++ b/examples/bch_test.c @@ -0,0 +1,130 @@ +#include +#include +#include +#include + +#include "lte.h" + +char *input_file_name; +int frame_length=1920, symbol_sz=128, nof_slots=1; + +void usage(char *prog) { + printf("Usage: %s [ls] -i input_file\n", prog); + 
printf("\t-l frame_length [Default %d]\n", frame_length); + printf("\t-s symbol_sz [Default %d]\n", symbol_sz); + printf("\t-n nof_frames [Default %d]\n", nof_slots); + printf("\t-v [set verbose to debug, default none]\n"); +} + +void parse_args(int argc, char **argv) { + int opt; + while ((opt = getopt(argc, argv, "ilsnv")) != -1) { + switch(opt) { + case 'i': + input_file_name = argv[optind]; + break; + case 'l': + frame_length = atoi(argv[optind]); + break; + case 's': + symbol_sz = atoi(argv[optind]); + break; + case 'n': + nof_slots = atoi(argv[optind]); + break; + case 'v': + verbose++; + break; + default: + usage(argv[0]); + exit(-1); + } + } + if (!input_file_name) { + usage(argv[0]); + exit(-1); + } +} + +void fft_run_slot(dft_plan_t *fft_plan, cf_t *input, cf_t *output) { + int i; + for (i=0;i<7;i++) { + input += CP_NORM(i, symbol_sz); + dft_run_c2c(fft_plan, input, output); + input += symbol_sz; + output += symbol_sz; + } +} + +int main(int argc, char **argv) { + filesource_t fsrc; + pbch_mib_t pbch_data; + pbch_t pbch; + dft_plan_t fft_plan; + + int frame_cnt; + cf_t *input, *outfft; + + if (argc < 3) { + usage(argv[0]); + exit(-1); + } + + parse_args(argc,argv); + + if (filesource_init(&fsrc, input_file_name, COMPLEX_FLOAT)) { + fprintf(stderr, "Error opening file %s\n", input_file_name); + exit(-1); + } + + input = malloc(frame_length*sizeof(cf_t)); + if (!input) { + perror("malloc"); + exit(-1); + } + outfft = malloc(7*symbol_sz*sizeof(cf_t)); + if (!outfft) { + perror("malloc"); + exit(-1); + } + + /* Init FFT plan */ + if (dft_plan_c2c(symbol_sz, FORWARD, &fft_plan)) { + fprintf(stderr, "Error initiating FFT plan\n"); + exit(-1); + } + fft_plan.options = DFT_DC_OFFSET | DFT_MIRROR_POS | DFT_NORMALIZE; + + if (pbch_init(&pbch, 0, CPNORM)) { + fprintf(stderr, "Error initiating PBCH\n"); + exit(-1); + } + int res = 0; + frame_cnt = 0; + while (frame_length == filesource_read(&fsrc, input, frame_length) + && frame_cnt < nof_slots + && res == 0) { + + fft_run_slot(&fft_plan, &input[960], outfft); + + res = pbch_decode(&pbch, outfft, &pbch_data, 6, 1); + if (res == -1) { + fprintf(stderr, "Error decoding PBCH\n"); + break; + } + frame_cnt++; + } + + if (res == 1) { + printf("MIB found\n"); + } else { + printf("MIB not found after %d frames\n", frame_cnt); + } + + pbch_free(&pbch); + free(input); + free(outfft); + + printf("Done\n"); + exit(0); +} diff --git a/examples/cell_search.c b/examples/cell_search.c new file mode 100644 index 000000000..0df21c6e9 --- /dev/null +++ b/examples/cell_search.c @@ -0,0 +1,272 @@ +#include +#include +#include +#include +#include + +#include "lte.h" + +char *input_file_name = NULL; +int nof_slots=100; +float corr_peak_threshold=2.5; +int ntime = 4; +int nfreq = 10; +int file_binary = 0; +int force_N_id_2=-1; + +filesource_t fsrc; +cf_t *input_buffer, *fft_buffer; +pbch_t pbch; +dft_plan_t fft_plan; +chest_t chest; +sync_t synch; + +void usage(char *prog) { + printf("Usage: %s [onlt] -i input_file\n", prog); + printf("\t-n number of frames [Default %d]\n", nof_slots); + printf("\t-t correlation threshold [Default %g]\n", corr_peak_threshold); + printf("\t-v [set verbose to debug, default none]\n"); + printf("\t-b Input files is binary [Default %s]\n", file_binary?"yes":"no"); + printf("\t-f force_N_id_2 [Default %d]\n", force_N_id_2); +} + +void parse_args(int argc, char **argv) { + int opt; + while ((opt = getopt(argc, argv, "intvbf")) != -1) { + switch(opt) { + case 'i': + input_file_name = argv[optind]; + break; + case 'n': + nof_slots = 
atoi(argv[optind]); + break; + case 't': + corr_peak_threshold = atof(argv[optind]); + break; + case 'b': + file_binary = 1; + break; + case 'v': + verbose++; + break; + case 'f': + force_N_id_2 = atoi(argv[optind]); + break; + default: + usage(argv[0]); + exit(-1); + } + } + if (!input_file_name) { + usage(argv[0]); + exit(-1); + } +} + +int base_init() { + file_data_type_t type = file_binary?COMPLEX_FLOAT_BIN:COMPLEX_FLOAT; + if (filesource_init(&fsrc, input_file_name, type)) { + fprintf(stderr, "Error opening file %s\n", input_file_name); + exit(-1); + } + + input_buffer = malloc(4 * 960 * sizeof(cf_t)); + if (!input_buffer) { + perror("malloc"); + exit(-1); + } + fft_buffer = malloc(CPNORM_NSYMB * 128 * sizeof(cf_t)); + if (!fft_buffer) { + perror("malloc"); + return -1; + } + + /* Init FFT plan */ + if (dft_plan_c2c(128, FORWARD, &fft_plan)) { + fprintf(stderr, "Error initiating FFT plan\n"); + return -1; + } + fft_plan.options = DFT_DC_OFFSET | DFT_MIRROR_POS | DFT_NORMALIZE; + + DEBUG("Memory init OK\n",0); + return 0; +} + + + +int mib_decoder_init(int cell_id) { + + /* + if (chest_LTEDL_init(&chest, ntime, nfreq, CPNORM_NSYMB, cell_id, 6)) { + fprintf(stderr, "Error initiating LTE equalizer\n"); + return -1; + } + */ + + DEBUG("Channel estimation initiated ntime=%d nfreq=%d\n", ntime, nfreq); + + if (pbch_init(&pbch, cell_id, CPNORM)) { + fprintf(stderr, "Error initiating PBCH\n"); + return -1; + } + DEBUG("PBCH initiated cell_id=%d\n", cell_id); + return 0; +} + +void fft_run_slot(dft_plan_t *fft_plan, cf_t *input, cf_t *output) { + int i; + for (i=0;i= 4) { + state = SYNC; + } + } + mib_attempts++; + } + break; + case DONE: + INFO("State Done, Slot idx=%d\n", slot_idx); + pbch_mib_fprint(stdout, &mib); + printf("Done\n"); + break; + } + + if (read_length) { + slot_idx++; + if (slot_idx == 20) { + slot_idx = 0; + } + } + } + + sync_free(&synch); + filesource_close(&fsrc); + + free(input_buffer); + + printf("Done\n"); + exit(0); +} diff --git a/examples/cmake/Modules/FindFFTWS.cmake b/examples/cmake/Modules/FindFFTWS.cmake new file mode 100644 index 000000000..ff28c85a7 --- /dev/null +++ b/examples/cmake/Modules/FindFFTWS.cmake @@ -0,0 +1,22 @@ +# - Find FFTW +# Find the native FFTW includes and library +# +# FFTW_INCLUDES - where to find fftw3.h +# FFTW_LIBRARIES - List of libraries when using FFTW. +# FFTW_FOUND - True if FFTW found. 
+ +if (FFTWS_INCLUDES) + # Already in cache, be silent + set (FFTWS_FIND_QUIETLY TRUE) +endif (FFTWS_INCLUDES) + +find_path (FFTWS_INCLUDES fftw3.h) +SET(CMAKE_FIND_LIBRARY_SUFFIXES .a) +find_library (FFTWfS_LIBRARIES NAMES fftw3f) +find_library (FFTWnS_LIBRARIES NAMES fftw3) +set(FFTWS_LIBRARIES ${FFTWfS_LIBRARIES} ${FFTWnS_LIBRARIES}) + +include (FindPackageHandleStandardArgs) +find_package_handle_standard_args (FFTWS DEFAULT_MSG FFTWS_LIBRARIES FFTWS_INCLUDES) + +mark_as_advanced (FFTWS_LIBRARIES FFTWS_INCLUDES) diff --git a/examples/equalizer_test.c b/examples/equalizer_test.c new file mode 100644 index 000000000..2e626ef05 --- /dev/null +++ b/examples/equalizer_test.c @@ -0,0 +1,175 @@ +#include +#include +#include +#include + +#include "lte.h" + +char *input_file_name; +int nof_slots=1; +int cell_id = 0; +int port_id = 0; +int nof_prb = 6; +lte_cp_t cp = CPNORM; +int file_binary = 0; + +int in_slot_length() { + if (CP_ISNORM(cp)) { + return SLOT_LEN_CPNORM(lte_symbol_sz(nof_prb)); + } else { + return SLOT_LEN_CPEXT(lte_symbol_sz(nof_prb)); + } +} + +int slot_length() { + return CP_NSYMB(cp)*lte_symbol_sz(nof_prb); +} + + +void usage(char *prog) { + printf("Usage: %s [bncprev] -i input_file\n", prog); + printf("\t-b input file is binary [Default no]\n"); + printf("\t-n number of slots [Default %d]\n", nof_slots); + printf("\t-c cell_id [Default %d]\n", cell_id); + printf("\t-p port_id [Default %d]\n", port_id); + printf("\t-r nof_prb [Default %d]\n", nof_prb); + printf("\t-e [extended cyclic prefix, Default normal]\n"); + printf("\t-v [set verbose to debug, default none]\n"); +} + +void parse_args(int argc, char **argv) { + int opt; + while ((opt = getopt(argc, argv, "bincprev")) != -1) { + switch(opt) { + case 'b': + file_binary = 1; + break; + case 'i': + input_file_name = argv[optind]; + break; + case 'n': + nof_slots = atoi(argv[optind]); + break; + case 'c': + cell_id = atoi(argv[optind]); + break; + case 'p': + port_id = atoi(argv[optind]); + break; + case 'r': + nof_prb = atoi(argv[optind]); + break; + case 'e': + cp = CPEXT; + break; + case 'v': + PRINT_DEBUG; + break; + default: + usage(argv[0]); + exit(-1); + } + } + if (!input_file_name) { + usage(argv[0]); + exit(-1); + } +} + +int main(int argc, char **argv) { + filesource_t fsrc; + lte_fft_t fft; + FILE *f = NULL; + chest_t eq; + int slot_cnt; + cf_t *input = NULL; + cf_t *outfft = NULL; + cf_t *ce = NULL; + + if (argc < 3) { + usage(argv[0]); + exit(-1); + } + + parse_args(argc,argv); + + if (filesource_init(&fsrc, input_file_name, file_binary?COMPLEX_FLOAT_BIN:COMPLEX_FLOAT)) { + fprintf(stderr, "Error opening file %s\n", input_file_name); + goto do_exit; + } + f = fopen("output.m", "w"); + if (!f) { + perror("fopen"); + goto do_exit; + } + + input = malloc(in_slot_length()*sizeof(cf_t)); + if (!input) { + perror("malloc"); + goto do_exit; + } + outfft = malloc(slot_length()*sizeof(cf_t)); + if (!outfft) { + perror("malloc"); + goto do_exit; + } + ce = malloc(nof_prb * RE_X_RB * CP_NSYMB(cp) * sizeof(cf_t)); + if (!ce) { + perror("malloc"); + goto do_exit; + } + + if (lte_fft_init(&fft, cp, lte_symbol_sz(nof_prb))) { + fprintf(stderr, "Error: initializing FFT\n"); + goto do_exit; + } + if (chest_init(&eq, cp, nof_prb, 1)) { + fprintf(stderr, "Error initializing equalizer\n"); + goto do_exit; + } + if (chest_ref_LTEDL(&eq, cell_id)) { + fprintf(stderr, "Error initializing reference signal\n"); + goto do_exit; + } + + bzero(input, sizeof(cf_t) * in_slot_length()); + bzero(outfft, sizeof(cf_t) * slot_length()); + + /* 
read all file or nof_slots */ + slot_cnt = 0; + while (in_slot_length() == filesource_read(&fsrc, input, in_slot_length()) + && (slot_cnt < nof_slots || nof_slots == -1)) { + + lte_fft_run(&fft, input, outfft); + + chest_ce_slot_port(&eq, outfft, ce, slot_cnt%20, 0); + + chest_fprint(&eq, f, slot_cnt%20, 0); + + fprintf(f, "ce=["); + vec_fprint_c(f, ce, nof_prb * RE_X_RB * CP_NSYMB(cp)); + fprintf(f, "];\n"); + + slot_cnt++; + } + +do_exit: + chest_free(&eq); + lte_fft_free(&fft); + if (ce) { + free(ce); + } + if (outfft) { + free(outfft); + } + if (input) { + free(input); + } + if (f) { + fclose(f); + } + filesource_close(&fsrc); + + printf("Done\n"); + exit(0); +} diff --git a/examples/hl_example.c b/examples/hl_example.c new file mode 100644 index 000000000..f97678086 --- /dev/null +++ b/examples/hl_example.c @@ -0,0 +1,95 @@ +#include +#include +#include +#include + +#include "lte.h" + +void usage(char *arg) { + printf("Usage: %s nbits snr_db\n",arg); +} + +int main(int argc, char **argv) { + binsource_hl bs; + mod_hl mod; + ch_awgn_hl ch; + demod_soft_hl demod_s; + demod_hard_hl demod_h; + + bzero(&bs,sizeof(bs)); + bzero(&mod,sizeof(mod)); + bzero(&ch,sizeof(ch)); + bzero(&demod_s,sizeof(demod_s)); + bzero(&demod_h,sizeof(demod_h)); + + if (argc<3) { + usage(argv[0]); + exit(-1); + } + + int nbits = atoi(argv[1]); + float snr_db = atof(argv[2]); + float var = sqrt(pow(10,-snr_db/10)); + + bs.init.seed = 0; + bs.init.cache_seq_nbits = 0; + bs.ctrl_in.nbits = nbits; + bs.output = malloc(nbits); + + mod.in_len = nbits; + mod.init.std = LTE_BPSK; + mod.input = bs.output; + mod.output = malloc(nbits*sizeof(_Complex float)); + + ch.in_len = nbits; + ch.input = mod.output; + ch.ctrl_in.variance = var; + ch.output = malloc(nbits*sizeof(_Complex float)); + + demod_h.in_len = nbits; + demod_h.init.std = LTE_BPSK; + demod_h.input = ch.output; + demod_h.output = malloc(nbits); + + demod_s.in_len = nbits; + demod_s.init.std = LTE_BPSK; + demod_s.input = ch.output; + demod_s.output = malloc(sizeof(float)*nbits); + demod_s.ctrl_in.alg_type = APPROX; + demod_s.ctrl_in.sigma = var; + + if ( binsource_initialize(&bs) || + mod_initialize(&mod) || + ch_awgn_initialize(&ch) || + demod_hard_initialize(&demod_h) || + demod_soft_initialize(&demod_s) + ) { + printf("Error initializing modules\n"); + exit(-1); + } + + binsource_work(&bs); + mod_work(&mod); + ch_awgn_work(&ch); + demod_hard_work(&demod_h); + demod_soft_work(&demod_s); + + /* hard decision for soft demodulation */ + char* tmp = malloc(nbits); + for (int i=0;i0?1:0; + } + + printf("Hard errors: %u/%d\n",bit_diff(bs.output,demod_h.output,nbits),nbits); + printf("Soft errors: %u/%d\n",bit_diff(bs.output,tmp,nbits),nbits); + + free(bs.output); + free(mod.output); + free(ch.output); + free(demod_h.output); + free(demod_s.output); + free(tmp); + + printf("Exit\n"); + exit(0); +} diff --git a/examples/ll_example.c b/examples/ll_example.c new file mode 100644 index 000000000..edfc0068b --- /dev/null +++ b/examples/ll_example.c @@ -0,0 +1,23 @@ +#include +#include + +#include "lte.h" + +int main(int argc, char **argv) { + binsource_t bs; + char* output; + + binsource_init(&bs); + binsource_seed_time(&bs); + + output = malloc(100); + + if (binsource_generate(&bs,output,100)) { + printf("Error generating bits\n"); + exit(-1); + } + printf("output: "); + bit_fprint(stdout,output,100); + printf("Done\n"); + exit(0); +} diff --git a/examples/pss_scan_usrp.c b/examples/pss_scan_usrp.c new file mode 100644 index 000000000..afd700831 --- /dev/null +++ 
b/examples/pss_scan_usrp.c @@ -0,0 +1,350 @@ +#include +#include +#include +#include +#include +#include +#include +#include + +#include "lte.h" + +#include "uhd.h" +#include "uhd_utils.h" + +int nof_slots=1000; +int band, earfcn=-1; +float pss_threshold=15.0; +int earfcn_start, earfcn_end = -1; +float rssi_threshold = -42.0; + +cf_t *input_buffer; +float *cfo_v; +int *idx_v; +float *p2a_v; +void *uhd; +int nof_bands; +int force_N_id_2; +float gain = 30.0; + +#define MAX_EARFCN 1000 +lte_earfcn_t channels[MAX_EARFCN]; +float rssi[MAX_EARFCN]; +float freqs[MAX_EARFCN]; +float cfo[MAX_EARFCN]; +float p2a[MAX_EARFCN]; + +#define MHZ 1000000 +#define SAMP_FREQ 1920000 +#define RSSI_FS 1000000 +#define RSSI_NSAMP 50000 +#define FLEN 9600 +#define FLEN_PERIOD 0.005 + +#define IS_SIGNAL(i) (10*log10f(rssi[i]) + 30 > rssi_threshold) + +void print_to_matlab(); + +void usage(char *prog) { + printf("Usage: %s [senvtr] -b band\n", prog); + printf("\t-s earfcn_start [Default %d]\n", earfcn_start); + printf("\t-e earfcn_end [Default All]\n"); + printf("\t-n number of frames [Default %d]\n", nof_slots); + printf("\t-v [set verbose to debug, default none]\n"); + printf("\t-t pss_threshold [Default %.2f]\n", pss_threshold); + printf("\t-r rssi_threshold [Default %.2f dBm]\n", rssi_threshold); + printf("\t-f force_N_id_2 [Default no]\n"); + printf("\t-g gain [Default no %.2f dB]\n", gain); +} + +void parse_args(int argc, char **argv) { + int opt; + while ((opt = getopt(argc, argv, "gfrtbsenv")) != -1) { + switch(opt) { + case 'g': + gain = atof(argv[optind]); + break; + case 'f': + force_N_id_2 = atoi(argv[optind]); + break; + case 't': + pss_threshold = atof(argv[optind]); + break; + case 'r': + rssi_threshold = -atof(argv[optind]); + break; + case 'b': + band = atoi(argv[optind]); + break; + case 's': + earfcn_start = atoi(argv[optind]); + break; + case 'e': + earfcn_end = atoi(argv[optind]); + break; + case 'n': + nof_slots = atoi(argv[optind]); + break; + case 'v': + verbose++; + break; + default: + usage(argv[0]); + exit(-1); + } + } +} + +int base_init(int frame_length) { + + input_buffer = malloc(2 * frame_length * sizeof(cf_t)); + if (!input_buffer) { + perror("malloc"); + exit(-1); + } + + idx_v = malloc(nof_slots * sizeof(int)); + if (!idx_v) { + perror("malloc"); + exit(-1); + } + cfo_v = malloc(nof_slots * sizeof(float)); + if (!cfo_v) { + perror("malloc"); + exit(-1); + } + p2a_v = malloc(nof_slots * sizeof(float)); + if (!p2a_v) { + perror("malloc"); + exit(-1); + } + + bzero(cfo, sizeof(float) * MAX_EARFCN); + bzero(p2a, sizeof(float) * MAX_EARFCN); + + /* open UHD device */ + printf("Opening UHD device...\n"); + if (uhd_open("",&uhd)) { + fprintf(stderr, "Error opening uhd\n"); + exit(-1); + } + + return 0; +} + +void base_free() { + + uhd_close(&uhd); + free(input_buffer); + free(idx_v); + free(cfo_v); + free(p2a_v); +} + +float mean_valid(int *idx_v, float *x, int nof_frames) { + int i; + float mean = 0; + int n = 0; + for (i=0;i 0) { + return mean/n; + } else { + return 0.0; + } +} + +int main(int argc, char **argv) { + int frame_cnt; + int i; + int nsamples; + int cell_id; + sync_t synch; + float max_peak_to_avg; + float sfo; + + if (argc < 3) { + usage(argv[0]); + exit(-1); + } + + parse_args(argc,argv); + + if (base_init(FLEN)) { + fprintf(stderr, "Error initializing memory\n"); + exit(-1); + } + + if (sync_init(&synch)) { + fprintf(stderr, "Error initiating PSS/SSS\n"); + exit(-1); + } + sync_set_threshold(&synch, pss_threshold); + sync_pss_det_peakmean(&synch); + + if (force_N_id_2 
!= -1) { + sync_force_N_id_2(&synch, force_N_id_2); + } + + nof_bands = lte_band_get_fd_band(band, channels, earfcn_start, earfcn_end, MAX_EARFCN); + printf("RSSI scan: %d freqs in band %d\n", nof_bands, band); + for (i=0;i max_peak_to_avg) { + max_peak_to_avg = p2a_v[frame_cnt]; + cell_id = sync_get_cell_id(&synch); + } + cfo_v[frame_cnt] = sync_get_cfo(&synch); + } else { + cfo_v[frame_cnt] = 0.0; + } + if (frame_cnt) { + memcpy(input_buffer, &input_buffer[FLEN], FLEN * sizeof(cf_t)); + } + if (VERBOSE_ISINFO()) { + printf("[%4d] - idx: %5d\tpeak-to-avg: %3.2f\tcfo=%.3f\r", frame_cnt, + idx_v[frame_cnt], p2a_v[frame_cnt], cfo_v[frame_cnt]); + } + frame_cnt++; + } + + cfo[i] = mean_valid(idx_v, cfo_v, nof_slots); + p2a[i] = sum_r(p2a_v, nof_slots) / nof_slots; + if (channels[i].id == 1900 + || channels[i].id == 1901) { + vec_fprint_i(stdout, idx_v, nof_slots); + } + + sfo = sfo_estimate(idx_v, nof_slots, FLEN_PERIOD); + if (VERBOSE_ISINFO()) { + printf("\n"); + } + + printf("[%3d/%d]: EARFCN %d Freq. %.2f MHz, " + "RSSI %3.2f dBm, PSS %2.2f dB, CFO=%+2.1f KHz, SFO=%+2.1f KHz, CELL_ID=%3d\n", i, nof_bands, + channels[i].id, channels[i].fd, 10*log10f(rssi[i]) + 30, + 10*log10f(p2a[i]), cfo[i] * 15, sfo / 1000, cell_id); + print_to_matlab(); + + } else { + INFO("[%3d/%d]: EARFCN %d Freq. %.2f MHz. RSSI below threshold (%3.2f < %3.2f dBm)\n", + i, nof_bands, channels[i].id, channels[i].fd, 10*log10f(rssi[i]) + 30, rssi_threshold); + } + } + + print_to_matlab(); + + sync_free(&synch); + base_free(); + + printf("Done\n"); + exit(0); +} + +void print_to_matlab() { + int i; + + FILE *f = fopen("output.m", "w"); + if (!f) { + perror("fopen"); + exit(-1); + } + fprintf(f, "fd=["); + for (i=0;i +#include +#include +#include +#include +#include + +#include "lte.h" +#include "uhd.h" + +int nof_slots=1000; +int band; + +cf_t *input_buffer, *fft_buffer; +void *uhd; +int earfcn_start = -1, earfcn_end = -1; + +#define MAX_EARFCN 1000 +lte_earfcn_t channels[MAX_EARFCN]; + +#define MHZ 1000000 +#define SAMP_FREQ 1920000 + +void usage(char *prog) { + printf("Usage: %s [nvse] -b band\n", prog); + printf("\t-s earfcn_start [Default All]\n"); + printf("\t-e earfcn_end [Default All]\n"); + printf("\t-n number of frames [Default %d]\n", nof_slots); + printf("\t-v [set verbose to debug, default none]\n"); +} + +void parse_args(int argc, char **argv) { + int opt; + while ((opt = getopt(argc, argv, "sebnv")) != -1) { + switch(opt) { + case 'b': + band = atoi(argv[optind]); + break; + case 's': + earfcn_start = atoi(argv[optind]); + break; + case 'e': + earfcn_end = atoi(argv[optind]); + break; + case 'n': + nof_slots = atoi(argv[optind]); + break; + case 'v': + verbose++; + break; + default: + usage(argv[0]); + exit(-1); + } + } +} + +int base_init() { + + input_buffer = malloc(4 * 960 * sizeof(cf_t)); + if (!input_buffer) { + perror("malloc"); + exit(-1); + } + + /* open UHD device */ + printf("Opening UHD device...\n"); + if (uhd_open("",&uhd)) { + fprintf(stderr, "Error opening uhd\n"); + exit(-1); + } + + printf("Setting sampling frequency %.2f MHz\n", (float) SAMP_FREQ/MHZ); + uhd_set_rx_srate(uhd, SAMP_FREQ); + + printf("Starting receiver...\n"); + uhd_start_rx_stream(uhd); + return 0; +} + + + +int main(int argc, char **argv) { + int frame_cnt; + int i; + int nsamples; + float rssi[MAX_EARFCN]; + + if (argc < 3) { + usage(argv[0]); + exit(-1); + } + + parse_args(argc,argv); + + if (base_init()) { + fprintf(stderr, "Error initializing memory\n"); + exit(-1); + } + + int nof_bands = lte_band_get_fd_band(band, 
channels, earfcn_start, earfcn_end, MAX_EARFCN); + printf("Scanning %d freqs in band %d\n", nof_bands, band); + for (i=0;i +#include +#include +#include +#include + +#include "lte.h" + +char *input_file_name; +char *output_file_name="abs_corr.txt"; +int nof_slots=100, frame_length=9600, symbol_sz=128; +float corr_peak_threshold=25.0; +int file_binary = 0; +int out_N_id_2 = 0, force_N_id_2=-1; + +#define CFO_AUTO -9999.0 +float force_cfo = CFO_AUTO; + +void usage(char *prog) { + printf("Usage: %s [onlt] -i input_file\n", prog); + printf("\t-o output_file [Default %s]\n", output_file_name); + printf("\t-l frame_length [Default %d]\n", frame_length); + printf("\t-n number of frames [Default %d]\n", nof_slots); + printf("\t-t correlation threshold [Default %g]\n", corr_peak_threshold); + printf("\t-s symbol_sz [Default %d]\n", symbol_sz); + printf("\t-b Input files is binary [Default %s]\n", file_binary?"yes":"no"); + printf("\t-N out_N_id_2 [Default %d]\n", out_N_id_2); + printf("\t-f force_N_id_2 [Default %d]\n", force_N_id_2); + printf("\t-c force_cfo [Default disabled]\n"); +} + +void parse_args(int argc, char **argv) { + int opt; + while ((opt = getopt(argc, argv, "ionltsbNfc")) != -1) { + switch(opt) { + case 'i': + input_file_name = argv[optind]; + break; + case 'o': + output_file_name = argv[optind]; + break; + case 'n': + nof_slots = atoi(argv[optind]); + break; + case 'l': + frame_length = atoi(argv[optind]); + break; + case 't': + corr_peak_threshold = atof(argv[optind]); + break; + case 's': + symbol_sz = atof(argv[optind]); + break; + case 'b': + file_binary = 1; + break; + case 'N': + out_N_id_2 = atoi(argv[optind]); + break; + case 'f': + force_N_id_2 = atoi(argv[optind]); + break; + case 'c': + force_cfo = atof(argv[optind]); + break; + default: + usage(argv[0]); + exit(-1); + } + } + if (!input_file_name) { + usage(argv[0]); + exit(-1); + } +} + +int main(int argc, char **argv) { + filesource_t fsrc; + filesink_t fsink; + pss_synch_t pss[3]; // One for each N_id_2 + sss_synch_t sss[3]; // One for each N_id_2 + int peak_pos[3]; + float *cfo; + float peak_value[3]; + float mean_value[3]; + int frame_cnt; + cf_t *input; + int m0, m1; + float m0_value, m1_value; + int N_id_2; + int sss_idx; + struct timeval tdata[3]; + int *exec_time; + + if (argc < 3) { + usage(argv[0]); + exit(-1); + } + + parse_args(argc,argv); + + gettimeofday(&tdata[1], NULL); + printf("Initializing...");fflush(stdout); + + file_data_type_t type = file_binary?COMPLEX_FLOAT_BIN:COMPLEX_FLOAT; + if (filesource_init(&fsrc, input_file_name, type)) { + fprintf(stderr, "Error opening file %s\n", input_file_name); + exit(-1); + } + if (filesink_init(&fsink, output_file_name, type)) { + fprintf(stderr, "Error opening file %s\n", output_file_name); + exit(-1); + } + + input = malloc(frame_length*sizeof(cf_t)); + if (!input) { + perror("malloc"); + exit(-1); + } + cfo = malloc(nof_slots*sizeof(float)); + if (!cfo) { + perror("malloc"); + exit(-1); + } + exec_time = malloc(nof_slots*sizeof(int)); + if (!exec_time) { + perror("malloc"); + exit(-1); + } + + /* We have 2 options here: + * a) We create 3 pss objects, each initialized with a different N_id_2 + * b) We create 1 pss object which scans for each N_id_2 one after another. + * a) requries more memory but has less latency and is paralellizable. 
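+ * b) would instead keep a single pss_synch_t and call pss_synch_set_N_id_2()
+ * before each of the three searches, trading some latency for less memory
+ * (sketch of the trade-off only; this test implements option a) below).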
+ */ + for (N_id_2=0;N_id_2<3;N_id_2++) { + if (pss_synch_init(&pss[N_id_2], frame_length)) { + fprintf(stderr, "Error initializing PSS object\n"); + exit(-1); + } + if (pss_synch_set_N_id_2(&pss[N_id_2], N_id_2)) { + fprintf(stderr, "Error initializing N_id_2\n"); + exit(-1); + } + if (sss_synch_init(&sss[N_id_2])) { + fprintf(stderr, "Error initializing SSS object\n"); + exit(-1); + } + if (sss_synch_set_N_id_2(&sss[N_id_2], N_id_2)) { + fprintf(stderr, "Error initializing N_id_2\n"); + exit(-1); + } + } + gettimeofday(&tdata[2], NULL); + get_time_interval(tdata); + printf("done in %d s %d ms\n", (int) tdata[0].tv_sec, (int) tdata[0].tv_usec/1000); + + printf("\n\tFr.Cnt\tN_id_2\tN_id_1\tSubf\tPSS Peak/Avg\tIdx\tm0\tm1\tCFO\n"); + printf("\t===============================================================================\n"); + + /* read all file or nof_frames */ + frame_cnt = 0; + while (frame_length == filesource_read(&fsrc, input, frame_length) + && frame_cnt < nof_slots) { + + gettimeofday(&tdata[1], NULL); + if (force_cfo != CFO_AUTO) { + nco_cexp_f_direct(input, -force_cfo/128, frame_length); + } + + if (force_N_id_2 != -1) { + N_id_2 = force_N_id_2; + peak_pos[N_id_2] = pss_synch_find_pss(&pss[N_id_2], input, &peak_value[N_id_2], &mean_value[N_id_2]); + } else { + for (N_id_2=0;N_id_2<3;N_id_2++) { + peak_pos[N_id_2] = pss_synch_find_pss(&pss[N_id_2], input, &peak_value[N_id_2], &mean_value[N_id_2]); + } + float max_value=-99999; + N_id_2=-1; + int i; + for (i=0;i<3;i++) { + if (peak_value[i] > max_value) { + max_value = peak_value[i]; + N_id_2 = i; + } + } + } + + /* If peak detected */ + if (peak_value[N_id_2]/mean_value[N_id_2] > corr_peak_threshold) { + + sss_idx = peak_pos[N_id_2]-2*(symbol_sz+CP(symbol_sz,CPNORM_LEN)); + if (sss_idx >= 0) { + sss_synch_m0m1(&sss[N_id_2], &input[sss_idx], + &m0, &m0_value, &m1, &m1_value); + + cfo[frame_cnt] = pss_synch_cfo_compute(&pss[N_id_2], &input[peak_pos[N_id_2]-128]); + printf("\t%d\t%d\t%d\t%d\t%.3f\t\t%3d\t%d\t%d\t%.3f\n", + frame_cnt,N_id_2, sss_synch_N_id_1(&sss[N_id_2], m0, m1), + sss_synch_subframe(m0, m1), peak_value[N_id_2]/mean_value[N_id_2], + peak_pos[N_id_2], m0, m1, + cfo[frame_cnt]); + } + } + gettimeofday(&tdata[2], NULL); + get_time_interval(tdata); + exec_time[frame_cnt] = tdata[0].tv_usec; + frame_cnt++; + } + + int i; + float avg_time=0; + for (i=0;i +#include +#include +#include +#include +#include + +#include "lte.h" + +typedef _Complex float cf_t; + +int frame_length=1000, nof_slots=128; +float ebno_db = 5.0; +unsigned int seed=0; +bool tail_biting = false; + +char message[40] = {0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,1,0,0,1,1,0,1,0,0,0,0,1}; + +void usage(char *prog) { + printf("Usage: %s [nl]\n", prog); + printf("\t-n nof_frames [Default %d]\n", nof_slots); + printf("\t-l frame_length [Default %d]\n", frame_length); + printf("\t-e ebno in dB [Default %.2f dB]\n", ebno_db); + printf("\t-s seed [Default 0=time]\n"); + printf("\t-t tail_bitting [Default %s]\n", tail_biting?"yes":"no"); +} + +void parse_args(int argc, char **argv) { + int opt; + while ((opt = getopt(argc, argv, "nlste")) != -1) { + switch(opt) { + case 'n': + nof_slots = atoi(argv[optind]); + break; + case 'l': + frame_length = atoi(argv[optind]); + break; + case 'e': + ebno_db = atof(argv[optind]); + break; + case 's': + seed = atoi(argv[optind]); + break; + case 't': + tail_biting = true; + break; + default: + usage(argv[0]); + exit(-1); + } + } +} + +int main(int argc, char **argv) { + viterbi_t dec; + convcoder_t cod; + 
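	/* modem_table + demod_soft supply the QPSK mapping and the float soft bits (LLRs)
	 * that viterbi_decode() takes as input */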
modem_table_t modem; + demod_soft_t demod; + int frame_cnt; + float *llr; + char *data_tx, *data_rx, *symbols; + cf_t *iq; + int i; + + parse_args(argc,argv); + + if (!seed) { + seed = time(NULL); + } + srand(seed); + + int coded_length = 3 * (frame_length + ((tail_biting)?0:6)); + + printf("Convolutional Code 1/3 K=7 Test\n"); + printf(" Frame length: %d\n", frame_length); + printf(" Codeword length: %d\n", coded_length); + printf(" Tail bitting: %s\n", tail_biting?"yes":"no"); + printf(" EbNo: %.2f\n", ebno_db); + + data_tx = malloc(frame_length * sizeof(char)); + if (!data_tx) { + perror("malloc"); + exit(-1); + } + + data_rx = malloc(frame_length * sizeof(char)); + if (!data_rx) { + perror("malloc"); + exit(-1); + } + + symbols = malloc(coded_length * sizeof(char)); + if (!symbols) { + perror("malloc"); + exit(-1); + } + llr = malloc(coded_length * sizeof(float)); + if (!llr) { + perror("malloc"); + exit(-1); + } + + iq = malloc(coded_length * sizeof(cf_t)); + if (!iq) { + perror("malloc"); + exit(-1); + } + + cod.K = 7; + cod.R = 3; + cod.tail_biting = tail_biting; + cod.framelength = frame_length; + cod.poly[0] = 0x6D; + cod.poly[1] = 0x4F; + cod.poly[2] = 0x57; + + float var = sqrt(pow(10,-ebno_db/10)); + + modem_table_init(&modem); + modem_table_std(&modem, LTE_QPSK, true); + demod_soft_init(&demod); + demod_soft_table_set(&demod, &modem); + demod_soft_alg_set(&demod, APPROX); + demod_soft_sigma_set(&demod, var); + + viterbi_init(&dec, CONVCODER_37, cod.poly, frame_length, tail_biting); + + /* read all file or nof_frames */ + frame_cnt = 0; + unsigned int errors=0; + while (frame_cnt < nof_slots) { + + /* generate data_tx */ + for (i=0;i. + * This file is part of OSLD-lib (http://https://github.com/ismagom/osld-lib) + * + * OSLD-lib is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * OSLD-lib is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with OSLD-lib. If not, see . + */ + +#ifndef CHEST_ +#define CHEST_ + +#include + +#include "ch_estimation/refsignal.h" +#include "filter/filter2d.h" +#include "lte/base.h" + +typedef _Complex float cf_t; /* this is only a shortcut */ + +/** This is an OFDM channel estimator. + * It works with any reference signal pattern, provided by the object + * refsignal_t + * A 2-D filter is used for freq and time channel interpolation. 
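+ * (see examples/equalizer_test.c: chest_ce_slot_port() takes one FFT-processed
+ * slot and fills an nof_prb*RE_X_RB*CP_NSYMB(cp) buffer with the interpolated
+ * channel estimate for the requested slot and port)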
+ * + */ + +/* Low-level API */ +typedef struct { + int nof_ports; + int nof_symbols; + int nof_prb; + int symbol_sz; + lte_cp_t cp; + refsignal_t refsignal[MAX_PORTS][NSLOTS_X_FRAME]; +}chest_t; + +int chest_init(chest_t *q, lte_cp_t cp, int nof_prb, int nof_ports); +void chest_free(chest_t *q); + +int chest_ref_LTEDL_slot_port(chest_t *q, int port, int nslot, int cell_id); +int chest_ref_LTEDL_slot(chest_t *q, int nslot, int cell_id); +int chest_ref_LTEDL(chest_t *q, int cell_id); + +void chest_ce_ref(chest_t *q, cf_t *input, int nslot, int port_id, int nref); +void chest_ce_slot_port(chest_t *q, cf_t *input, cf_t *ce, int nslot, int port_id); +void chest_ce_slot(chest_t *q, cf_t *input, cf_t **ce, int nslot); + +void chest_fprint(chest_t *q, FILE *stream, int nslot, int port_id); +void chest_ref_fprint(chest_t *q, FILE *stream, int nslot, int port_id); +void chest_recvsig_fprint(chest_t *q, FILE *stream, int nslot, int port_id); +void chest_ce_fprint(chest_t *q, FILE *stream, int nslot, int port_id); +int chest_ref_symbols(chest_t *q, int port_id, int nslot, int l[2]); + +/* High-level API */ + +/** TODO: The high-level API has N interfaces, one for each port */ + +typedef struct { + chest_t obj; + struct chest_init { + int nof_symbols; // 7 for normal cp, 6 for extended + int port_id; + int nof_ports; + int cell_id; + int nof_prb; + int ntime; + int nfreq; + } init; + cf_t *input; + int in_len; + struct chest_ctrl_in { + int slot_id; // slot id in the 10ms frame + } ctrl_in; + cf_t *output; + int *out_len; +}chest_hl; + +#define DEFAULT_FRAME_SIZE 2048 + +int chest_initialize(chest_hl* h); +int chest_work(chest_hl* hl); +int chest_stop(chest_hl* hl); + +#endif diff --git a/include/ch_estimation/refsignal.h b/include/ch_estimation/refsignal.h new file mode 100644 index 000000000..7fec166af --- /dev/null +++ b/include/ch_estimation/refsignal.h @@ -0,0 +1,54 @@ +/* + * Copyright (c) 2013, Ismael Gomez-Miguelez . + * This file is part of OSLD-lib (http://https://github.com/ismagom/osld-lib) + * + * OSLD-lib is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * OSLD-lib is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with OSLD-lib. If not, see . + */ + +#ifndef REFSIGNAL_ +#define REFSIGNAL_ + + +/* Object to manage reference signals for OFDM channel equalization. + * + * It generates the reference signals for LTE. 
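+ * (refsignal_init_LTEDL() below generates the downlink reference symbols for
+ * one port and slot, given the cell id, cyclic prefix and number of PRB)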
+ * + */ + +#include "lte/base.h" + +typedef _Complex float cf_t; + +typedef struct { + int time_idx; + int freq_idx; + cf_t simbol; + cf_t recv_simbol; +}ref_t; + +typedef struct { + int nof_refs; // number of reference signals + int *symbols_ref; // symbols with at least one reference + int nsymbols; // number of symbols with at least one reference + int voffset; // offset of the first reference in the freq domain + ref_t *refs; + cf_t *ch_est; +} refsignal_t; + +int refsignal_init_LTEDL(refsignal_t *q, int port_id, int nslot, + int cell_id, lte_cp_t cp, int nof_prb); + +void refsignal_free(refsignal_t *q); + +#endif diff --git a/include/channel/ch_awgn.h b/include/channel/ch_awgn.h new file mode 100644 index 000000000..6ab884aa1 --- /dev/null +++ b/include/channel/ch_awgn.h @@ -0,0 +1,43 @@ +/* + * Copyright (c) 2013, Ismael Gomez-Miguelez . + * This file is part of OSLD-lib (http://https://github.com/ismagom/osld-lib) + * + * OSLD-lib is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * OSLD-lib is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with OSLD-lib. If not, see . + */ +#include + +#ifndef CH_AWGN_ +#define CH_AWGN_ + +typedef _Complex float cf; + +void ch_awgn(const cf* input, cf* output, float variance, int buff_sz); + +/* High-level API */ + +typedef struct { + const cf* input; + int in_len; + struct ch_awgn_ctrl_in { + float variance; // Noise variance + } ctrl_in; + + cf* output; + int* out_len; +}ch_awgn_hl; + +int ch_awgn_initialize(ch_awgn_hl* hl); +int ch_awgn_work(ch_awgn_hl* hl); + +#endif diff --git a/include/fec/convcoder.h b/include/fec/convcoder.h new file mode 100644 index 000000000..80eafdd13 --- /dev/null +++ b/include/fec/convcoder.h @@ -0,0 +1,54 @@ +/* + * Copyright (c) 2013, Ismael Gomez-Miguelez . + * This file is part of OSLD-lib (http://https://github.com/ismagom/osld-lib) + * + * OSLD-lib is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * OSLD-lib is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with OSLD-lib. If not, see . 
+ */ + +#ifndef CONVCODER_ +#define CONVCODER_ + + +#include + +typedef enum { + CONVCODER_27, CONVCODER_29, CONVCODER_37, CONVCODER_39 +}viterbi_type_t; + +typedef struct { + void *ptr; + int R; + int K; + unsigned int framebits; + bool tail_biting; + int poly[3]; + int (*decode) (void*, float*, char*); + void (*free) (void*); +}viterbi_t; + +int viterbi_init(viterbi_t *q, viterbi_type_t type, int poly[3], int framebits, bool tail_bitting); +void viterbi_free(viterbi_t *q); +int viterbi_decode(viterbi_t *q, float *symbols, char *data); + +typedef struct { + int R; + int K; + int poly[3]; + int framelength; + bool tail_biting; +}convcoder_t; + +int conv_encode(convcoder_t *q, char *input, char *output); + +#endif diff --git a/include/fec/crc.h b/include/fec/crc.h new file mode 100644 index 000000000..da6216ae2 --- /dev/null +++ b/include/fec/crc.h @@ -0,0 +1,26 @@ +/* + * Copyright (c) 2013, Ismael Gomez-Miguelez . + * This file is part of OSLD-lib (http://https://github.com/ismagom/osld-lib) + * + * OSLD-lib is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * OSLD-lib is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with OSLD-lib. If not, see . + */ + +#ifndef CRC_ +#define CRC_ + + +unsigned int crc(unsigned int crc, char *bufptr, int len, + int long_crc,unsigned int poly, int paste_word); + +#endif diff --git a/include/filter/filter2d.h b/include/filter/filter2d.h new file mode 100644 index 000000000..73d51dbfa --- /dev/null +++ b/include/filter/filter2d.h @@ -0,0 +1,42 @@ +/* + * Copyright (c) 2013, Ismael Gomez-Miguelez . + * This file is part of OSLD-lib (http://https://github.com/ismagom/osld-lib) + * + * OSLD-lib is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * OSLD-lib is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with OSLD-lib. If not, see . 
+ */ + +#ifndef FILTER2D_ +#define FILTER2D_ + +/* 2-D real filter of complex input + * + */ +typedef _Complex float cf_t; + +typedef struct { + int sztime; // Output signal size in the time domain + int szfreq; // Output signal size in the freq domain + int ntime; // 2-D Filter size in time domain + int nfreq; // 2-D Filter size in frequency domain + float **taps; // 2-D filter coefficients + cf_t *output; // Output signal +} filter2d_t; + +int filter2d_init (filter2d_t* q, float **taps, int ntime, int nfreq, int sztime, int szfreq); +int filter2d_init_default (filter2d_t* q, int ntime, int nfreq, int sztime, int szfreq); +void filter2d_free(filter2d_t *q); +void filter2d_reset(filter2d_t *q); +void filter2d_add(filter2d_t *q, cf_t h, int time_idx, int freq_idx); + +#endif diff --git a/include/io/binsource.h b/include/io/binsource.h new file mode 100644 index 000000000..fa7e7c2b6 --- /dev/null +++ b/include/io/binsource.h @@ -0,0 +1,60 @@ +/* + * Copyright (c) 2013, Ismael Gomez-Miguelez . + * This file is part of OSLD-lib (http://https://github.com/ismagom/osld-lib) + * + * OSLD-lib is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * OSLD-lib is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with OSLD-lib. If not, see . + */ + +#ifndef BINSOURCE_ +#define BINSOURCE_ + + +#include + +/* Low-level API */ +typedef struct { + unsigned int seed; + uint32_t *seq_buff; + int seq_buff_nwords; + int seq_cache_nbits; + int seq_cache_rp; +}binsource_t; + +void binsource_init(binsource_t* q); +void binsource_free(binsource_t* q); +void binsource_seed_set(binsource_t* q, unsigned int seed); +void binsource_seed_time(binsource_t *q); +int binsource_cache_gen(binsource_t* q, int nbits); +void binsource_cache_cpy(binsource_t* q, char *bits, int nbits); +int binsource_generate(binsource_t* q, char *bits, int nbits); + +/* High-level API */ +typedef struct { + binsource_t obj; + struct binsource_init { + int cache_seq_nbits; // If non-zero, generates random bits on init + unsigned int seed; // If non-zero, uses as random seed, otherwise local time is used. + } init; + struct binsource_ctrl_in { + int nbits; // Number of bits to generate + } ctrl_in; + char* output; + int* out_len; +}binsource_hl; + +int binsource_initialize(binsource_hl* h); +int binsource_work( binsource_hl* hl); +int binsource_stop(binsource_hl* hl); + +#endif diff --git a/include/io/filesink.h b/include/io/filesink.h new file mode 100644 index 000000000..08acd6d7a --- /dev/null +++ b/include/io/filesink.h @@ -0,0 +1,55 @@ +/* + * Copyright (c) 2013, Ismael Gomez-Miguelez . + * This file is part of OSLD-lib (http://https://github.com/ismagom/osld-lib) + * + * OSLD-lib is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. 
+ * + * OSLD-lib is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with OSLD-lib. If not, see . + */ + +#ifndef FILESINK_ +#define FILESINK_ + +#include +#include + +#include "io/format.h" + +/* Low-level API */ +typedef struct { + FILE *f; + file_data_type_t type; +}filesink_t; + +int filesink_init(filesink_t *q, char *filename, file_data_type_t type); +void filesink_close(filesink_t *q); + +int filesink_write(filesink_t *q, void *buffer, int nsamples); + + +/* High-level API */ +typedef struct { + filesink_t obj; + struct filesink_init { + char *file_name; + int block_length; + int data_type; + } init; + void* input; + int in_len; +}filesink_hl; + +int filesink_initialize(filesink_hl* h); +int filesink_work( filesink_hl* hl); +int filesink_stop(filesink_hl* h); + +#endif diff --git a/include/io/filesource.h b/include/io/filesource.h new file mode 100644 index 000000000..0defaefdd --- /dev/null +++ b/include/io/filesource.h @@ -0,0 +1,58 @@ +/* + * Copyright (c) 2013, Ismael Gomez-Miguelez . + * This file is part of OSLD-lib (http://https://github.com/ismagom/osld-lib) + * + * OSLD-lib is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * OSLD-lib is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with OSLD-lib. If not, see . + */ + +#ifndef FILESOURCE_ +#define FILESOURCE_ + +#include +#include + +#include "io/format.h" + +/* Low-level API */ +typedef struct { + FILE *f; + file_data_type_t type; +}filesource_t; + +int filesource_init(filesource_t *q, char *filename, file_data_type_t type); +void filesource_close(filesource_t *q); + +int filesource_read(filesource_t *q, void *buffer, int nsamples); + + +/* High-level API */ +typedef struct { + filesource_t obj; + struct filesource_init { + char *file_name; + int block_length; + int data_type; + } init; + struct filesource_ctrl_in { + int nsamples; // Number of samples to read + } ctrl_in; + void* output; + int* out_len; +}filesource_hl; + +int filesource_initialize(filesource_hl* h); +int filesource_work( filesource_hl* hl); +int filesource_stop(filesource_hl* h); + +#endif diff --git a/include/io/format.h b/include/io/format.h new file mode 100644 index 000000000..caa0d516a --- /dev/null +++ b/include/io/format.h @@ -0,0 +1,7 @@ + +#ifndef FORMAT_ +#define FORMAT_ + +typedef enum { FLOAT, COMPLEX_FLOAT, COMPLEX_SHORT, FLOAT_BIN, COMPLEX_FLOAT_BIN, COMPLEX_SHORT_BIN} file_data_type_t; + +#endif diff --git a/include/lte.h b/include/lte.h new file mode 100644 index 000000000..4e576303a --- /dev/null +++ b/include/lte.h @@ -0,0 +1,72 @@ +/* + * Copyright (c) 2013, Ismael Gomez-Miguelez . 
+ * This file is part of OSLD-lib (http://https://github.com/ismagom/osld-lib) + * + * OSLD-lib is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * OSLD-lib is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with OSLD-lib. If not, see . + */ + +#include +#include + +#ifndef _LTE_ +#define _LTE_ + +#include "utils/bit.h" +#include "utils/convolution.h" +#include "utils/debug.h" +#include "utils/dft.h" +#include "utils/matrix.h" +#include "utils/mux.h" +#include "utils/nco.h" +#include "utils/pack.h" +#include "utils/vector.h" + +#include "lte/base.h" +#include "lte/fft.h" +#include "lte/sequence.h" + +#include "ch_estimation/chest.h" +#include "ch_estimation/refsignal.h" + +#include "channel/ch_awgn.h" + +#include "fec/convcoder.h" +#include "fec/crc.h" + +#include "filter/filter2d.h" + +#include "io/binsource.h" +#include "io/filesink.h" +#include "io/filesource.h" + +#include "modem/demod_hard.h" +#include "modem/demod_soft.h" +#include "modem/mod.h" +#include "modem/modem_table.h" + +#include "phch/pbch.h" + +#include "ratematching/rm_conv.h" + +#include "scrambling/scrambling.h" + +#include "resampling/interp.h" + +#include "sync/pss.h" +#include "sync/sfo.h" +#include "sync/sss.h" +#include "sync/sync.h" + + +#endif diff --git a/include/lte/base.h b/include/lte/base.h new file mode 100644 index 000000000..78c46200a --- /dev/null +++ b/include/lte/base.h @@ -0,0 +1,93 @@ +/* + * Copyright (c) 2013, Ismael Gomez-Miguelez . + * This file is part of OSLD-lib (http://https://github.com/ismagom/osld-lib) + * + * OSLD-lib is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * OSLD-lib is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with OSLD-lib. If not, see . 
+ */ + +#ifndef _LTEBASE_ +#define _LTEBASE_ + +#define NSUBFRAMES_X_FRAME 10 +#define NSLOTS_X_FRAME (2*NSUBFRAMES_X_FRAME) + +#define MAX_PORTS 4 + +typedef enum {CPNORM, CPEXT} lte_cp_t; + +#define MAX_NSYMB 7 + +#define CPNORM_NSYMB 7 +#define CPNORM_SF_NSYMB 2*CPNORM_NSYMB +#define CPNORM_0_LEN 160 +#define CPNORM_LEN 144 + +#define CPEXT_NSYMB 6 +#define CPEXT_SF_NSYMB 2*CPEXT_NSYMB +#define CPEXT_LEN 512 +#define CPEXT_7_5_LEN 1024 + +#define CP_ISNORM(cp) (cp==CPNORM) +#define CP_ISEXT(cp) (cp==CPEXT) +#define CP_NSYMB(cp) (CP_ISNORM(cp)?CPNORM_NSYMB:CPEXT_NSYMB) + +#define CP(symbol_sz, c) (c*symbol_sz/2048) +#define CP_NORM(symbol, symbol_sz) (symbol==0)?CP(symbol_sz,CPNORM_0_LEN):CP(symbol_sz,CPNORM_LEN) +#define CP_EXT(symbol_sz) CP(symbol_sz,CPEXT_LEN) + +#define SLOT_LEN_CPNORM(symbol_sz) (symbol_sz+CP(symbol_sz,CPNORM_0_LEN)+(CPNORM_NSYMB-1)*(symbol_sz+CP(symbol_sz,CPNORM_LEN))) +#define SLOT_LEN_CPEXT(symbol_sz) (CPEXT_NSYMB*(symbol_sz+CP(symbol_sz, CPEXT_LEN))) + +#define SF_LEN_CPNORM(symbol_sz) 2*SLOT_LEN_CPNORM(symbol_sz) +#define SF_LEN_CPEXT(symbol_sz) 2*SLOT_LEN_CPEXT(symbol_sz) + +#define SF_IDX_CPNORM(idx, symbol_sz) (idx==0?(CP(symbol_sz, CPNORM_0_LEN)):(CP(symbol_sz, CPNORM_0_LEN)+idx*(symbol_sz+CP(symbol_sz, CPNORM_LEN)))) +#define SF_IDX_CPEXT(idx, symbol_sz) (idx*(symbol_sz+CP(symbol_sz, CPEXT_LEN))) + +#define SLOT_IDX_CPNORM(idx, symbol_sz) (idx==0?0:symbol_sz*CPNORM_NSYMB) +#define SLOT_IDX_CPEXT(idx, symbol_sz) (idx==0?0:symbol_sz*CPEXT_NSYMB) + +#define MAX_PRB 110 +#define RE_X_RB 12 + +#define RS_VSHIFT(cell_id) (cell_id%6) + +#define GUARD_RE(nof_prb) ((lte_symbol_sz(nof_prb)-nof_prb*RE_X_RB)/2) + +#define SAMPLE_IDX(symbol_sz, symbol_idx, sample_idx) (symbol_idx*symbol_sz + sample_idx) + +const int lte_symbol_sz(int nof_prb); +int lte_re_x_prb(int ns, int symbol, int nof_ports, int nof_symbols); +int lte_voffset(int symbol_id, int cell_id, int nof_ports); + +#define NOF_LTE_BANDS 29 + + +typedef struct { + int id; + float fd; +}lte_earfcn_t; + +enum band_geographical_area { + ALL, NAR, APAC, EMEA, JAPAN, CALA, NA +}; + +float lte_band_fd(int earfcn); +int lte_band_get_fd_band(int band, lte_earfcn_t *earfcn, int earfcn_start, int earfcn_end, int max_elems); +int lte_band_get_fd_band_all(int band, lte_earfcn_t *earfcn, int max_nelems); +int lte_band_get_fd_region(enum band_geographical_area region, lte_earfcn_t *earfcn, int max_elems); + + + +#endif diff --git a/include/lte/fft.h b/include/lte/fft.h new file mode 100644 index 000000000..172ea9f1d --- /dev/null +++ b/include/lte/fft.h @@ -0,0 +1,47 @@ +/* + * Copyright (c) 2013, Ismael Gomez-Miguelez . + * This file is part of OSLD-lib (http://https://github.com/ismagom/osld-lib) + * + * OSLD-lib is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * OSLD-lib is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with OSLD-lib. If not, see . 
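
The cyclic-prefix macros above scale the nominal 2048-point CP lengths (160 samples for the first symbol, 144 for the rest, giving a 15360-sample slot) down to the actual symbol size. A small check of the arithmetic, as a sketch assuming the 128-point FFT used with 6 PRB at 1.92 Msps:

#include <stdio.h>
#include "lte/base.h"

int main() {
    int symbol_sz = 128;   /* assumed FFT size for 6 PRB (1.92 Msps) */
    /* CP(128, 160) = 160*128/2048 = 10 samples, CP(128, 144) = 9 samples */
    printf("first CP: %d, other CPs: %d\n",
           CP(symbol_sz, CPNORM_0_LEN), CP(symbol_sz, CPNORM_LEN));
    /* slot = 128+10 + 6*(128+9) = 960 samples, subframe = 1920 samples */
    printf("slot: %d, subframe: %d\n",
           SLOT_LEN_CPNORM(symbol_sz), SF_LEN_CPNORM(symbol_sz));
    return 0;
}
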
+ */ + +#ifndef LTEFFT_ +#define LTEFFT_ + + +#include +#include + +#include "lte/base.h" +#include "utils/dft.h" + +typedef _Complex float cf_t; /* this is only a shortcut */ + +/* This is common for both directions */ +typedef struct { + dft_plan_t fft_plan; + int nof_symbols; + int symbol_sz; + lte_cp_t cp_type; +}lte_fft_t; + +int lte_fft_init(lte_fft_t *q, lte_cp_t cp_type, int symbol_sz); +void lte_fft_free(lte_fft_t *q); +void lte_fft_run(lte_fft_t *q, cf_t *input, cf_t *output); + +int lte_ifft_init(lte_fft_t *q, lte_cp_t cp_type, int symbol_sz); +void lte_ifft_free(lte_fft_t *q); +void lte_ifft_run(lte_fft_t *q, cf_t *input, cf_t *output); + +#endif diff --git a/include/lte/sequence.h b/include/lte/sequence.h new file mode 100644 index 000000000..0b90b4518 --- /dev/null +++ b/include/lte/sequence.h @@ -0,0 +1,37 @@ +/* + * Copyright (c) 2013, Ismael Gomez-Miguelez . + * This file is part of OSLD-lib (http://https://github.com/ismagom/osld-lib) + * + * OSLD-lib is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * OSLD-lib is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with OSLD-lib. If not, see . + */ + +#ifndef LTESEQ_ +#define LTESEQ_ + +#include "lte/base.h" + +typedef struct { + char *c; + int len; +}sequence_t; + +int sequence_init(sequence_t *q, int len); +void sequence_free(sequence_t *q); + +int sequence_LTEPRS(sequence_t *q, int len, int seed); + +int sequence_pbch(sequence_t *seq, lte_cp_t cp, int cell_id); +int sequence_pbch_crc(sequence_t *seq, int nof_ports); + +#endif diff --git a/include/modem/demod_hard.h b/include/modem/demod_hard.h new file mode 100644 index 000000000..197059800 --- /dev/null +++ b/include/modem/demod_hard.h @@ -0,0 +1,58 @@ +/* + * Copyright (c) 2013, Ismael Gomez-Miguelez . + * This file is part of OSLD-lib (http://https://github.com/ismagom/osld-lib) + * + * OSLD-lib is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * OSLD-lib is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with OSLD-lib. If not, see . 
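
The lte_fft_t object above wraps a dft plan together with the CP geometry, and the intended call pattern appears to be init/run/free. A minimal sketch; the helper is illustrative, and the exact number of samples consumed and produced by lte_fft_run() is not documented here.

#include <complex.h>
#include "lte/fft.h"

/* Sketch: OFDM-demodulate one slot of time-domain samples into
 * frequency-domain symbols. Buffer sizes and the init return convention
 * (nonzero on error) are assumptions. */
int demodulate_slot(cf_t *time_slot, cf_t *freq_symbols, int symbol_sz) {
    lte_fft_t fft;
    if (lte_fft_init(&fft, CPNORM, symbol_sz)) {
        return -1;
    }
    lte_fft_run(&fft, time_slot, freq_symbols);
    lte_fft_free(&fft);
    return 0;
}
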
+ */ + +#ifndef DEMOD_HARD_ +#define DEMOD_HARD_ + +#include +#include + +#include "modem_table.h" + +typedef _Complex float cf; + +typedef struct { + enum modem_std table; /* In this implementation, mapping table is hard-coded */ +}demod_hard_t; + + +void demod_hard_init(demod_hard_t* q); +void demod_hard_table(demod_hard_t* q, enum modem_std table); +int demod_hard_demodulate(demod_hard_t* q, const cf* symbols, char *bits, int nsymbols); + + + +/* High-level API */ +typedef struct { + demod_hard_t obj; + struct demod_hard_init { + enum modem_std std; // Symbol mapping standard (see modem_table.h) + } init; + + const cf* input; + int in_len; + + char* output; + int *out_len; +}demod_hard_hl; + +int demod_hard_initialize(demod_hard_hl* hl); +int demod_hard_work(demod_hard_hl* hl); + + +#endif diff --git a/include/modem/demod_soft.h b/include/modem/demod_soft.h new file mode 100644 index 000000000..ecc200dee --- /dev/null +++ b/include/modem/demod_soft.h @@ -0,0 +1,69 @@ +/* + * Copyright (c) 2013, Ismael Gomez-Miguelez . + * This file is part of OSLD-lib (http://https://github.com/ismagom/osld-lib) + * + * OSLD-lib is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * OSLD-lib is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with OSLD-lib. If not, see . + */ + +#ifndef DEMOD_SOFT_ +#define DEMOD_SOFT_ + +#include +#include + +#include "modem_table.h" + +enum alg { EXACT, APPROX }; + +typedef struct { + float sigma; // noise power + enum alg alg_type; // soft demapping algorithm (EXACT or APPROX) + modem_table_t *table; // symbol mapping table (see modem_table.h) +}demod_soft_t; + +void demod_soft_init(demod_soft_t *q); +void demod_soft_table_set(demod_soft_t *q, modem_table_t *table); +void demod_soft_alg_set(demod_soft_t *q, enum alg alg_type); +void demod_soft_sigma_set(demod_soft_t *q, float sigma); +int demod_soft_demodulate(demod_soft_t *q, const cf* symbols, float* llr, int nsymbols); + + +/* High-level API */ +typedef struct { + demod_soft_t obj; + modem_table_t table; + + struct demod_soft_init { + enum modem_std std; // symbol mapping standard (see modem_table.h) + } init; + + const cf* input; + int in_len; + + struct demod_soft_ctrl_in { + float sigma; // Estimated noise variance + enum alg alg_type; // soft demapping algorithm (EXACT or APPROX) + }ctrl_in; + + float* output; + int *out_len; + +}demod_soft_hl; + +int demod_soft_initialize(demod_soft_hl* hl); +int demod_soft_work(demod_soft_hl* hl); +int demod_soft_stop(demod_soft_hl* hl); + + +#endif diff --git a/include/modem/mod.h b/include/modem/mod.h new file mode 100644 index 000000000..6b63dc7a6 --- /dev/null +++ b/include/modem/mod.h @@ -0,0 +1,49 @@ +/* + * Copyright (c) 2013, Ismael Gomez-Miguelez . + * This file is part of OSLD-lib (http://https://github.com/ismagom/osld-lib) + * + * OSLD-lib is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. 
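
Putting the two structures above together, soft demodulation takes a modem_table_t for the constellation plus the noise variance and algorithm choice. A sketch of the low-level call sequence; the helper is illustrative, and the LLR sign convention and return values are assumptions.

#include <stdbool.h>
#include <complex.h>
#include "modem/modem_table.h"
#include "modem/demod_soft.h"

/* Sketch: approximate-LLR soft demodulation of QPSK symbols. Return-value
 * conventions (nonzero error from modem_table_std(), number of LLRs from
 * demod_soft_demodulate()) are assumptions. */
int demodulate_qpsk(const cf *symbols, float *llr, int nsymbols, float sigma2) {
    modem_table_t table;
    demod_soft_t demod;

    modem_table_init(&table);
    if (modem_table_std(&table, LTE_QPSK, true)) {   /* true: also build the soft table */
        return -1;
    }
    demod_soft_init(&demod);
    demod_soft_table_set(&demod, &table);
    demod_soft_alg_set(&demod, APPROX);
    demod_soft_sigma_set(&demod, sigma2);

    int n = demod_soft_demodulate(&demod, symbols, llr, nsymbols);
    modem_table_free(&table);
    return n;
}
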
+ * + * OSLD-lib is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with OSLD-lib. If not, see . + */ + +#ifndef MOD_ +#define MOD_ + +#include +#include + +#include "modem_table.h" + +typedef _Complex float cf; + +int mod_modulate(modem_table_t* table, const char *bits, cf* symbols, int nbits); + +/* High-level API */ +typedef struct { + modem_table_t obj; + struct mod_init { + enum modem_std std; // symbol mapping standard (see modem_table.h) + } init; + + const char* input; + int in_len; + + cf* output; + int *out_len; +}mod_hl; + +int mod_initialize(mod_hl* hl); +int mod_work(mod_hl* hl); +int mod_stop(mod_hl* hl); + +#endif diff --git a/include/modem/modem_table.h b/include/modem/modem_table.h new file mode 100644 index 000000000..e99eea137 --- /dev/null +++ b/include/modem/modem_table.h @@ -0,0 +1,51 @@ +/* + * Copyright (c) 2013, Ismael Gomez-Miguelez . + * This file is part of OSLD-lib (http://https://github.com/ismagom/osld-lib) + * + * OSLD-lib is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * OSLD-lib is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with OSLD-lib. If not, see . + */ + + +#ifndef MODEM_TABLE_ +#define MODEM_TABLE_ + +#include +#include +#include + +typedef _Complex float cf; +typedef struct { + int idx[2][6][32]; +}soft_table_t; + +typedef struct { + cf* symbol_table; // bit-to-symbol mapping + soft_table_t soft_table; // symbol-to-bit mapping (used in soft demodulating) + int nsymbols; // number of modulation symbols + int nbits_x_symbol; // number of bits per symbol +}modem_table_t; + + +// Modulation standards +enum modem_std { + LTE_BPSK, LTE_QPSK, LTE_QAM16, LTE_QAM64 +}; + +void modem_table_init(modem_table_t* q); +void modem_table_free(modem_table_t* q); +void modem_table_reset(modem_table_t* q); +int modem_table_set(modem_table_t* q, cf* table, soft_table_t *soft_table, int nsymbols, int nbits_x_symbol); +int modem_table_std(modem_table_t* q, enum modem_std table, bool compute_soft_demod); + +#endif diff --git a/include/phch/pbch.h b/include/phch/pbch.h new file mode 100644 index 000000000..86c156345 --- /dev/null +++ b/include/phch/pbch.h @@ -0,0 +1,78 @@ +/* + * Copyright (c) 2013, Ismael Gomez-Miguelez . + * This file is part of OSLD-lib (http://https://github.com/ismagom/osld-lib) + * + * OSLD-lib is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * OSLD-lib is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. 
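
On the transmit side, mod_modulate() maps bits through a modem_table_t. A minimal QPSK sketch; the helper is illustrative, and it is assumed that nbits is a multiple of the bits per symbol and that the return value is the number of symbols written.

#include <stdbool.h>
#include <complex.h>
#include "modem/modem_table.h"
#include "modem/mod.h"

int modulate_qpsk(const char *bits, cf *symbols, int nbits) {
    modem_table_t table;
    modem_table_init(&table);
    if (modem_table_std(&table, LTE_QPSK, false)) {  /* no soft table needed for tx */
        return -1;
    }
    int n = mod_modulate(&table, bits, symbols, nbits);
    modem_table_free(&table);
    return n;
}
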
+ * + * You should have received a copy of the GNU Lesser General Public License + * along with OSLD-lib. If not, see . + */ + +#ifndef PBCH_ +#define PBCH_ + +#include "lte/base.h" +#include "modem/mod.h" +#include "modem/demod_soft.h" +#include "scrambling/scrambling.h" +#include "ratematching/rm_conv.h" +#include "fec/convcoder.h" +#include "fec/crc.h" + +#define PBCH_RE_CPNORM 240 +#define PBCH_RE_CPEXT 216 + +typedef _Complex float cf_t; + +enum phich_length { NORMAL, EXTENDED}; +enum phich_resources { R_1_6, R_1_2, R_1, R_2}; + +typedef struct { + int nof_ports; + int nof_prb; + int sfn; + enum phich_length phich_length; + int phich_resources; +}pbch_mib_t; + +/* PBCH receiver */ +typedef struct { + int cell_id; + lte_cp_t cp; + + /* buffers */ + cf_t *pbch_symbols; + float *pbch_llr; + float *temp; + float *pbch_rm; + char *data; + + int frame_idx; + + /* tx & rx objects */ + modem_table_t mod; + demod_soft_t demod; + sequence_t seq_pbch; + viterbi_t decoder; + +}pbch_t; + +int pbch_init(pbch_t *q, int cell_id, lte_cp_t cp); +void pbch_free(pbch_t *q); +int pbch_decode(pbch_t *q, cf_t *slot1_symbols, pbch_mib_t *data, int nof_prb, float ebno); +void pbch_mib_fprint(FILE *stream, pbch_mib_t *mib); + + +bool pbch_exists(int nframe, int nslot); +int pbch_put(cf_t *pbch, cf_t *slot1_data, int nof_prb, lte_cp_t cp, int cell_id); +int pbch_get(cf_t *pbch, cf_t *slot1_data, int nof_prb, lte_cp_t cp, int cell_id); + +#endif diff --git a/include/ratematching/rm_conv.h b/include/ratematching/rm_conv.h new file mode 100644 index 000000000..2d9278a07 --- /dev/null +++ b/include/ratematching/rm_conv.h @@ -0,0 +1,46 @@ +/* + * Copyright (c) 2013, Ismael Gomez-Miguelez . + * This file is part of OSLD-lib (http://https://github.com/ismagom/osld-lib) + * + * OSLD-lib is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * OSLD-lib is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with OSLD-lib. If not, see . + */ + +#ifndef RM_CONV_ +#define RM_CONV_ + +#define RX_NULL 10000 + +int rm_conv_rx(float *input, float *output, int in_len, int out_len); + + +/* High-level API */ +typedef struct { + struct rm_conv_init { + int direction; + } init; + void *input; // input type may be char or float depending on hard + int in_len; + struct rm_conv_ctrl_in { + int E; + int S; + } ctrl_in; + void *output; + int *out_len; +}rm_conv_hl; + +int rm_conv_initialize(rm_conv_hl* h); +int rm_conv_work(rm_conv_hl* hl); +int rm_conv_stop(rm_conv_hl* hl); + +#endif diff --git a/include/resampling/interp.h b/include/resampling/interp.h new file mode 100644 index 000000000..9d42e3a1c --- /dev/null +++ b/include/resampling/interp.h @@ -0,0 +1,23 @@ +/* + * Copyright (c) 2013, Ismael Gomez-Miguelez . + * This file is part of OSLD-lib (http://https://github.com/ismagom/osld-lib) + * + * OSLD-lib is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. 
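
The PBCH receiver above bundles demodulation, descrambling, rate recovery, Viterbi decoding and CRC checking behind pbch_decode(). A decoding sketch; the helper is illustrative, and the return convention of pbch_decode() and the layout expected in slot1_symbols are assumptions.

#include <stdio.h>
#include <complex.h>
#include "phch/pbch.h"

/* Sketch: decode the MIB from the frequency-domain symbols of slot 1.
 * A return value of 0 is assumed to mean a successfully decoded MIB. */
int decode_mib(cf_t *slot1_symbols, int cell_id, int nof_prb, float ebno) {
    pbch_t pbch;
    pbch_mib_t mib;

    if (pbch_init(&pbch, cell_id, CPNORM)) {
        return -1;
    }
    int ret = pbch_decode(&pbch, slot1_symbols, &mib, nof_prb, ebno);
    if (ret == 0) {
        pbch_mib_fprint(stdout, &mib);   /* SFN, bandwidth, PHICH configuration */
    }
    pbch_free(&pbch);
    return ret;
}
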
+ * + * OSLD-lib is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with OSLD-lib. If not, see . + */ + +typedef _Complex float cf_t; + + +void interp_linear_offset(cf_t *input, cf_t *output, int M, int len, int off_st, int off_end); +void interp_linear(cf_t *input, cf_t *output, int M, int len); diff --git a/include/scrambling/scrambling.h b/include/scrambling/scrambling.h new file mode 100644 index 000000000..f934de948 --- /dev/null +++ b/include/scrambling/scrambling.h @@ -0,0 +1,65 @@ +/* + * Copyright (c) 2013, Ismael Gomez-Miguelez . + * This file is part of OSLD-lib (http://https://github.com/ismagom/osld-lib) + * + * OSLD-lib is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * OSLD-lib is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with OSLD-lib. If not, see . + */ + +#ifndef SCRAMBLING_ +#define SCRAMBLING_ + +#include "lte/sequence.h" +#include "lte/base.h" + +/* Scrambling has no state */ +void scrambling_bit(sequence_t *s, char *data); +void scrambling_float(sequence_t *s, float *data); +int scrambling_float_offset(sequence_t *s, float *data, int offset, int len); + + +/* High-level API */ + +/* channel integer values */ +#define PDSCH 0 /* also PUSCH */ +#define PCFICH 1 +#define PDCCH 2 +#define PBCH 3 +#define PMCH 4 +#define PUCCH 5 + +typedef struct { + sequence_t seq[NSUBFRAMES_X_FRAME]; +}scrambling_t; + +typedef struct { + scrambling_t obj; + struct scrambling_init { + int hard; + int q; + int cell_id; + int nrnti; + int nMBSFN; + int channel; + int nof_symbols; // 7 normal 6 extended + } init; + void *input; // input type may be char or float depending on hard + int in_len; + struct scrambling_ctrl_in { + int subframe; + } ctrl_in; + void *output; + int *out_len; +}scrambling_hl; + +#endif diff --git a/include/sync/pss.h b/include/sync/pss.h new file mode 100644 index 000000000..2a5c3409b --- /dev/null +++ b/include/sync/pss.h @@ -0,0 +1,127 @@ +/* + * Copyright (c) 2013, Ismael Gomez-Miguelez . + * This file is part of OSLD-lib (http://https://github.com/ismagom/osld-lib) + * + * OSLD-lib is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * OSLD-lib is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with OSLD-lib. If not, see . 
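
The low-level scrambling calls operate on a precomputed sequence_t, such as the PBCH sequence from lte/sequence.h. A sketch; the helper is illustrative, and it assumes scrambling_bit() applies the whole sequence in place, one bit per char.

#include "lte/sequence.h"
#include "scrambling/scrambling.h"

/* Sketch: scramble a PBCH bit buffer in place. data must hold at least
 * seq.len bits (assumed). */
int scramble_pbch(char *bits, int cell_id) {
    sequence_t seq;
    if (sequence_pbch(&seq, CPNORM, cell_id)) {
        return -1;
    }
    scrambling_bit(&seq, bits);
    sequence_free(&seq);
    return 0;
}
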
+ */ + +#ifndef PSS_ +#define PSS_ + +#include +#include +#include "utils/convolution.h" + +typedef _Complex float cf_t; /* this is only a shortcut */ + +#define CONVOLUTION_FFT +#define DEFAULT_CORRELATION_TH 10000 +#define DEFAULT_NOSYNC_TIMEOUT 5 + +#define PSS_LEN_FREQ 129 +#define PSS_LEN 62 +#define PSS_RE 6*12 + + + +/** The pss_synch_t object provides functions for fast computation of the crosscorrelation + * between the PSS and received signal and CFO estimation. Also, the function pss_synch_periodic() is designed + * to be called periodically every subframe, taking care of the correct data alignment with respect + * to the PSS sequence. + */ + + +/* Low-level API */ +typedef struct { + +#ifdef CONVOLUTION_FFT + conv_fft_cc_t conv_fft; +#endif + + int frame_size; + int N_id_2; + float current_cfo; + bool cfo_auto; // default true + int nof_nosync_frames; + int nosync_timeout_frames; // default 5 + float correlation_threshold; // default 10000 + int frame_start_idx; + int fb_wp; + + cf_t *pss_signal_freq; + cf_t *tmp_input; + float *conv_abs; + cf_t *frame_buffer; + cf_t *conv_output; + cf_t *tmp_nco; +}pss_synch_t; + +/* Basic functionality */ + +int pss_synch_init(pss_synch_t *q, int frame_size); +void pss_synch_free(pss_synch_t *q); +int pss_generate(cf_t *signal, int direction, int N_id_2); + +int pss_synch_set_N_id_2(pss_synch_t *q, int N_id_2); +int pss_synch_find_pss(pss_synch_t *q, cf_t *input, float *corr_peak_value, float *corr_mean_value); +float pss_synch_cfo_compute(pss_synch_t* q, cf_t *pss_recv); + + + + + + +/* Automatic frame management functions (for periodic calling) */ +int pss_synch_periodic(pss_synch_t *q, cf_t *input, cf_t *output, int nsamples); +void pss_synch_set_timeout(pss_synch_t *q, int nof_frames); +void pss_synch_set_threshold(pss_synch_t *q, float threshold); +void pss_synch_set_cfo_mode(pss_synch_t *q, bool cfo_auto); +float pss_synch_get_cfo(pss_synch_t *q); +int pss_synch_get_frame_start_idx(pss_synch_t *q); + + + + + + + + +/* High-level API */ + +typedef struct { + pss_synch_t obj; + struct pss_synch_init { + int frame_size; // if 0, 2048 + int unsync_nof_pkts; + int N_id_2; + int do_cfo; + } init; + cf_t *input; + int in_len; + struct pss_synch_ctrl_in { + int correlation_threshold; + float manual_cfo; + } ctrl_in; + cf_t *output; + int *out_len; +}pss_synch_hl; + +#define DEFAULT_FRAME_SIZE 2048 + +int pss_synch_initialize(pss_synch_hl* h); +int pss_synch_work(pss_synch_hl* hl); +int pss_synch_stop(pss_synch_hl* hl); + + +#endif diff --git a/include/sync/sfo.h b/include/sync/sfo.h new file mode 100644 index 000000000..a2a45748e --- /dev/null +++ b/include/sync/sfo.h @@ -0,0 +1,25 @@ +/* + * Copyright (c) 2013, Ismael Gomez-Miguelez . + * This file is part of OSLD-lib (http://https://github.com/ismagom/osld-lib) + * + * OSLD-lib is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * OSLD-lib is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with OSLD-lib. If not, see . 
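
For one-shot use of the low-level PSS API above, the object is initialized for a frame size, pointed at one of the three N_id_2 sequences, and asked for the correlation peak. A sketch; the helper is illustrative, and the meaning of the returned index and the detection rule are assumptions.

#include <complex.h>
#include "sync/pss.h"

/* Sketch: correlate one frame against the N_id_2 = 0 PSS and return the peak
 * position. A detection criterion could compare peak_value against mean_value
 * or against DEFAULT_CORRELATION_TH; the exact rule is an assumption here. */
int find_pss_peak(cf_t *input, int frame_size) {
    pss_synch_t pss;
    float peak_value, mean_value;

    if (pss_synch_init(&pss, frame_size)) {
        return -1;
    }
    pss_synch_set_N_id_2(&pss, 0);
    int peak_idx = pss_synch_find_pss(&pss, input, &peak_value, &mean_value);
    pss_synch_free(&pss);
    return peak_idx;
}
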
+ */ + +#ifndef SFO_ +#define SFO_ + +float sfo_estimate(int *t0, int len, float period); + + +#endif diff --git a/include/sync/sss.h b/include/sync/sss.h new file mode 100644 index 000000000..1cb1850a8 --- /dev/null +++ b/include/sync/sss.h @@ -0,0 +1,116 @@ +/* + * Copyright (c) 2013, Ismael Gomez-Miguelez . + * This file is part of OSLD-lib (http://https://github.com/ismagom/osld-lib) + * + * OSLD-lib is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * OSLD-lib is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with OSLD-lib. If not, see . + */ + +#ifndef SSS_ +#define SSS_ + +#include +#include + +#include "utils/dft.h" + +typedef _Complex float cf_t; /* this is only a shortcut */ + +/** gives the beginning of the SSS symbol (to be passed to sss_synch_m0m1). + * subframe_sz is the length of the subframe, e.g. 1920 for the 1.9 MHz + * symbol_sz is the OFDM symbol size (including CP), e.g. 137 for the 1.9 MHz + */ +#define SSS_SYMBOL_ST(subframe_sz, symbol_sz) (subframe_sz/2-2*symbol_sz) +#define SSS_POS_SYMBOL 33 + +#define SSS_DFT_LEN 128 +#define N_SSS 31 + +struct sss_tables { + int z1[N_SSS][N_SSS]; + int c[2][N_SSS]; + int s[N_SSS][N_SSS]; + int N_id_2; +}; + +/* Allocate 32 complex to make it multiple of 32-byte AVX instructions alignment requirement. + * Should use vect_malloc() to make it platform agnostic. 
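
Plugging the example sizes from the comment above into the macro, SSS_SYMBOL_ST(1920, 137) = 1920/2 - 2*137 = 686, i.e. the SSS symbol starts 686 samples into the subframe, two OFDM symbols before the first slot boundary. A trivial check:

#include <stdio.h>
#include "sync/sss.h"

int main() {
    /* Example numerology from the comment above: 1920-sample subframe and
     * 137-sample OFDM symbol including its cyclic prefix */
    printf("SSS symbol starts at sample %d\n", SSS_SYMBOL_ST(1920, 137));   /* 960 - 274 = 686 */
    return 0;
}
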
+ */ +struct fc_tables { + cf_t z1[N_SSS+1][N_SSS+1]; + cf_t c[2][N_SSS+1]; + cf_t s[N_SSS+1][N_SSS+1]; +}; + + +/* Low-level API */ +typedef struct { + + dft_plan_t dftp_input; + + float corr_peak_threshold; + int symbol_sz; + int subframe_sz; + + int N_id_1_table[30][30]; + struct fc_tables fc_tables; + +}sss_synch_t; + + +/* Basic functionality */ +int sss_synch_init(sss_synch_t *q); +void sss_synch_free(sss_synch_t *q); +void sss_generate(float *signal, int cell_id); + +int sss_synch_set_N_id_2(sss_synch_t *q, int N_id_2); + +void sss_synch_m0m1(sss_synch_t *q, cf_t *input, int *m0, float *m0_value, + int *m1, float *m1_value); +int sss_synch_subframe(int m0, int m1); +int sss_synch_N_id_1(sss_synch_t *q, int m0, int m1); + +int sss_synch_frame(sss_synch_t *q, cf_t *input, int *subframe_idx, int *N_id_1); +void sss_synch_set_threshold(sss_synch_t *q, float threshold); +void sss_synch_set_symbol_sz(sss_synch_t *q, int symbol_sz); +void sss_synch_set_subframe_sz(sss_synch_t *q, int subframe_sz); + + +/* High-level API */ + +typedef struct { + sss_synch_t obj; + struct sss_synch_init { + int N_id_2; + } init; + cf_t *input; + int in_len; + struct sss_synch_ctrl_in { + int symbol_sz; + int subframe_sz; + int correlation_threshold; + } ctrl_in; + struct sss_synch_ctrl_out { + int subframe_idx; + int N_id_1; + } ctrl_out; +}sss_synch_hl; + +#define DEFAULT_FRAME_SIZE 2048 + +int sss_synch_initialize(sss_synch_hl* h); +int sss_synch_work(sss_synch_hl* hl); +int sss_synch_stop(sss_synch_hl* hl); + +#endif + diff --git a/include/sync/sync.h b/include/sync/sync.h new file mode 100644 index 000000000..59a2c2ad3 --- /dev/null +++ b/include/sync/sync.h @@ -0,0 +1,56 @@ +/* + * Copyright (c) 2013, Ismael Gomez-Miguelez . + * This file is part of OSLD-lib (http://https://github.com/ismagom/osld-lib) + * + * OSLD-lib is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * OSLD-lib is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with OSLD-lib. If not, see . 
+ */ + +#ifndef SYNC_ +#define SYNC_ + +#include "pss.h" +#include "sss.h" +#include "sfo.h" + +enum sync_pss_det { ABSOLUTE, PEAK_MEAN}; + +typedef struct { + pss_synch_t pss[3]; // One for each N_id_2 + sss_synch_t sss[3]; // One for each N_id_2 + enum sync_pss_det pss_mode; + float threshold; + float peak_to_avg; + int force_N_id_2; + int N_id_2; + int N_id_1; + int slot_id; + float cfo; +}sync_t; + +int sync_run(sync_t *q, cf_t *input, int read_offset); +float sync_get_cfo(sync_t *q); +void sync_pss_det_absolute(sync_t *q); +void sync_pss_det_peakmean(sync_t *q); +void sync_force_N_id_2(sync_t *q, int force_N_id_2); +int sync_get_slot_id(sync_t *q); +float sync_get_peak_to_avg(sync_t *q); +int sync_get_N_id_2(sync_t *q); +int sync_get_N_id_1(sync_t *q); +int sync_get_cell_id(sync_t *q); +void sync_set_threshold(sync_t *q, float threshold); +int sync_init(sync_t *q); +void sync_free(sync_t *q); + +#endif + diff --git a/include/utils/bit.h b/include/utils/bit.h new file mode 100644 index 000000000..c6f59191b --- /dev/null +++ b/include/utils/bit.h @@ -0,0 +1,31 @@ +/* + * Copyright (c) 2013, Ismael Gomez-Miguelez . + * This file is part of OSLD-lib (http://https://github.com/ismagom/osld-lib) + * + * OSLD-lib is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * OSLD-lib is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with OSLD-lib. If not, see . + */ + +#ifndef BIT_ +#define BIT_ + +#include +#include + +uint32_t bit_unpack(char **bits, int nof_bits); +void bit_pack(uint32_t value, char **bits, int nof_bits); +void bit_fprint(FILE *stream, char *bits, int nof_bits); +unsigned int bit_diff(char *x, char *y, int nbits); + +#endif + diff --git a/include/utils/convolution.h b/include/utils/convolution.h new file mode 100644 index 000000000..c6f3fef35 --- /dev/null +++ b/include/utils/convolution.h @@ -0,0 +1,43 @@ +/* + * Copyright (c) 2013, Ismael Gomez-Miguelez . + * This file is part of OSLD-lib (http://https://github.com/ismagom/osld-lib) + * + * OSLD-lib is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * OSLD-lib is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with OSLD-lib. If not, see . 
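
sync_t above runs the three PSS correlators and, on a hit, the corresponding SSS search, exposing the detected identifiers through getters. A sketch; the helper is illustrative, the threshold is a placeholder, and the return convention of sync_run() is an assumption.

#include <stdio.h>
#include <complex.h>
#include "sync/sync.h"

/* Sketch: run the combined PSS/SSS synchronizer on a buffer and report the
 * detected cell. A non-negative return is assumed to mean a detection. */
int detect_cell(cf_t *input) {
    sync_t sync;
    if (sync_init(&sync)) {
        return -1;
    }
    sync_pss_det_peakmean(&sync);       /* use peak-to-average PSS detection */
    sync_set_threshold(&sync, 10.0);    /* placeholder threshold */

    int ret = sync_run(&sync, input, 0);
    if (ret >= 0) {
        printf("cell_id=%d slot=%d cfo=%.2f\n",
               sync_get_cell_id(&sync), sync_get_slot_id(&sync), sync_get_cfo(&sync));
    }
    sync_free(&sync);
    return ret;
}
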
+ */ + +#ifndef CONVOLUTION_H_ +#define CONVOLUTION_H_ + +#include "utils/dft.h" + +typedef struct { + _Complex float *input_fft; + _Complex float *filter_fft; + _Complex float *output_fft; + _Complex float *output_fft2; + int input_len; + int filter_len; + int output_len; + dft_plan_t input_plan; + dft_plan_t filter_plan; + dft_plan_t output_plan; +}conv_fft_cc_t; + +int conv_fft_cc_init(conv_fft_cc_t *state, int input_len, int filter_len); +void conv_fft_cc_free(conv_fft_cc_t *state); +int conv_fft_cc_run(conv_fft_cc_t *state, _Complex float *input, _Complex float *filter, _Complex float *output); + +int conv_cc(_Complex float *input, _Complex float *filter, _Complex float *output, int input_len, int filter_len); + +#endif diff --git a/include/utils/debug.h b/include/utils/debug.h new file mode 100644 index 000000000..20914eedc --- /dev/null +++ b/include/utils/debug.h @@ -0,0 +1,37 @@ +#ifndef DEBUG_H +#define DEBUG_H + +#include + +#define VERBOSE_DEBUG 2 +#define VERBOSE_INFO 1 +#define VERBOSE_NONE 0 + +#include +void get_time_interval(struct timeval * tdata); + +#ifndef DEBUG_DISABLED + +extern int verbose; + +#define VERBOSE_ISINFO() (verbose==VERBOSE_INFO) +#define VERBOSE_ISDEBUG() (verbose==VERBOSE_DEBUG) + +#define PRINT_DEBUG verbose=VERBOSE_DEBUG +#define PRINT_INFO verbose=VERBOSE_INFO +#define PRINT_NONE verbose=VERBOSE_NONE + +#define DEBUG(_fmt, ...) if (verbose >= VERBOSE_DEBUG) \ + fprintf(stdout, "[DEBUG]: " _fmt, __VA_ARGS__) + +#define INFO(_fmt, ...) if (verbose >= VERBOSE_INFO) \ + fprintf(stdout, "[INFO]: " _fmt, __VA_ARGS__) + +#else + +#define DEBUG +#define INFO + +#endif + +#endif diff --git a/include/utils/dft.h b/include/utils/dft.h new file mode 100644 index 000000000..8f6d55d4c --- /dev/null +++ b/include/utils/dft.h @@ -0,0 +1,92 @@ +/* + * Copyright (c) 2013, Ismael Gomez-Miguelez . + * This file is part of OSLD-lib (http://https://github.com/ismagom/osld-lib) + * + * OSLD-lib is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * OSLD-lib is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with OSLD-lib. If not, see . + */ + +#ifndef DFT_H_ +#define DFT_H_ + +#include + + +/* dft is a frontend to the fftw3 library. It facilitates the computation of complex or real DFT, + * power spectral density, normalization, etc. + * It also supports the creation of multiple FFT plans for different FFT sizes or options, selecting + * a different one at runtime. 
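
The conv_fft_cc_t object above caches its FFT plans so repeated convolutions of the same sizes avoid re-planning. A sketch; the helper is illustrative, and the required output length of input_len + filter_len - 1 samples is the usual linear-convolution size, assumed here.

#include <complex.h>
#include "utils/convolution.h"

/* Sketch: FFT-based linear convolution of an input block with a filter. */
int convolve(_Complex float *input, int input_len,
             _Complex float *filter, int filter_len,
             _Complex float *output) {
    conv_fft_cc_t conv;
    if (conv_fft_cc_init(&conv, input_len, filter_len)) {
        return -1;
    }
    int n = conv_fft_cc_run(&conv, input, filter, output);
    conv_fft_cc_free(&conv);
    return n;
}
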
+ */ + + +typedef enum { + COMPLEX_2_COMPLEX, REAL_2_REAL, COMPLEX_2_REAL +}dft_mode_t; + +typedef enum { + FORWARD, BACKWARD +}dft_dir_t; + + +#define DFT_MIRROR_PRE 1 +#define DFT_PSD 2 +#define DFT_OUT_DB 4 +#define DFT_MIRROR_POS 8 +#define DFT_NORMALIZE 16 +#define DFT_DC_OFFSET 32 + +typedef struct { + int size; + int sign; + void *in; + void *out; + void *p; + int options; + dft_dir_t dir; + dft_mode_t mode; +}dft_plan_t; + +typedef _Complex float dft_c_t; +typedef float dft_r_t; + +/* Create DFT plans */ + +int dft_plan(const int dft_points, dft_mode_t mode, dft_dir_t dir, dft_plan_t *plan); +int dft_plan_c2c(const int dft_points, dft_dir_t dir, dft_plan_t *plan); +int dft_plan_r2r(const int dft_points, dft_dir_t dir, dft_plan_t *plan); +int dft_plan_c2r(const int dft_points, dft_dir_t dir, dft_plan_t *plan); + +void dft_plan_free(dft_plan_t *plan); + + +/* Create a vector of DFT plans */ + +int dft_plan_vector(const int *dft_points, dft_mode_t *modes, dft_dir_t *dirs, + int nof_plans, dft_plan_t *plans); +int dft_plan_multi_c2c(const int *dft_points, dft_dir_t dir, int nof_plans, + dft_plan_t *plans); +int dft_plan_multi_c2r(const int *dft_points, dft_dir_t dir, int nof_plans, + dft_plan_t *plans); +int dft_plan_multi_r2r(const int *dft_points, dft_dir_t dir, int nof_plans, + dft_plan_t *plans); +void dft_plan_free_vector(dft_plan_t *plan, int nof_plans); + +/* Compute DFT */ + +void dft_run(dft_plan_t *plan, void *in, void *out); +void dft_run_c2c(dft_plan_t *plan, dft_c_t *in, dft_c_t *out); +void dft_run_r2r(dft_plan_t *plan, dft_r_t *in, dft_r_t *out); +void dft_run_c2r(dft_plan_t *plan, dft_c_t *in, dft_r_t *out); + +#endif + diff --git a/include/utils/matrix.h b/include/utils/matrix.h new file mode 100644 index 000000000..863ee8b01 --- /dev/null +++ b/include/utils/matrix.h @@ -0,0 +1,38 @@ +/* + * Copyright (c) 2013, Ismael Gomez-Miguelez . + * This file is part of OSLD-lib (http://https://github.com/ismagom/osld-lib) + * + * OSLD-lib is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * OSLD-lib is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with OSLD-lib. If not, see . + */ + + +#include + +#ifndef MATRIX_ +#define MATRIX_ + +typedef _Complex float cf_t; + +int matrix_init(void ***m, int sz_x, int sz_y, int elem_sz); +void matrix_free(void **q, int sz_x); +void matrix_bzero(void **q, int sz_x, int sz_y, int elem_sz); +void matrix_fprintf_cf(FILE *f, cf_t **q, int sz_x, int sz_y); +void matrix_fprintf_f(FILE *f, float **q, int sz_x, int sz_y); +void matrix_copy(void **dst, void **src, int sz_x, int sz_y, int elem_sz); +void matrix_dotprod_cf(cf_t **x, cf_t **y, cf_t **out, int sz_x, int sz_y); +void matrix_dotprod_float(float **x, float **y, float **out, int sz_x, int sz_y); +void matrix_dotprod_int(int **x, int **y, int **out, int sz_x, int sz_y); + +#endif + diff --git a/include/utils/mux.h b/include/utils/mux.h new file mode 100644 index 000000000..c96e29454 --- /dev/null +++ b/include/utils/mux.h @@ -0,0 +1,30 @@ +/* + * Copyright (c) 2013, Ismael Gomez-Miguelez . 
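
A typical use of the dft frontend above is to create a plan once and run it many times. A sketch for a forward 128-point complex transform; the helper is illustrative, and whether option flags such as DFT_NORMALIZE take effect by assigning plan.options before running is an assumption based on the field name.

#include <complex.h>
#include "utils/dft.h"

int transform_128(dft_c_t *in, dft_c_t *out) {
    dft_plan_t plan;
    if (dft_plan_c2c(128, FORWARD, &plan)) {   /* nonzero assumed to mean error */
        return -1;
    }
    plan.options = DFT_NORMALIZE;              /* assumed to be honoured by dft_run */
    dft_run_c2c(&plan, in, out);
    dft_plan_free(&plan);
    return 0;
}
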
+ * This file is part of OSLD-lib (http://https://github.com/ismagom/osld-lib) + * + * OSLD-lib is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * OSLD-lib is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with OSLD-lib. If not, see . + */ + + +#ifndef MUX_ +#define MUX_ + +void mux(void **input, void *output, int *input_lengths, int *input_padding_pre, int nof_inputs, + int sample_sz); + +void demux(void *input, void **output, int *output_lengths, + int *output_padding_pre, int *output_padding_post, int nof_outputs, + int sample_sz); + +#endif diff --git a/include/utils/nco.h b/include/utils/nco.h new file mode 100644 index 000000000..ae1cb912a --- /dev/null +++ b/include/utils/nco.h @@ -0,0 +1,43 @@ +/* + * Copyright (c) 2013, Ismael Gomez-Miguelez . + * This file is part of OSLD-lib (http://https://github.com/ismagom/osld-lib) + * + * OSLD-lib is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * OSLD-lib is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with OSLD-lib. If not, see . + */ + +#ifndef NCO_ +#define NCO_ + +#include + +typedef struct { + int size; + float *cost; + float *sint; +}nco_t; + +void nco_init(nco_t *nco, int size); +void nco_destroy(nco_t *nco); + +float nco_sin(nco_t *nco, float phase); +float nco_cos(nco_t *nco, float phase); +void nco_sincos(nco_t *nco, float phase, float *sin, float *cos); +_Complex float nco_cexp(nco_t *nco, float arg); + +void nco_sin_f(nco_t *nco, float *x, float freq, int len); +void nco_cos_f(nco_t *nco, float *x, float freq, int len); +void nco_cexp_f(nco_t *nco, _Complex float *x, float freq, int len); +void nco_cexp_f_direct(_Complex float *x, float freq, int len); + +#endif diff --git a/include/utils/pack.h b/include/utils/pack.h new file mode 100644 index 000000000..a7df1ec9a --- /dev/null +++ b/include/utils/pack.h @@ -0,0 +1,25 @@ +/* + * Copyright (c) 2013, Ismael Gomez-Miguelez . + * This file is part of OSLD-lib (http://https://github.com/ismagom/osld-lib) + * + * OSLD-lib is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * OSLD-lib is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with OSLD-lib. If not, see . 
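
The nco_t object above trades a one-off table build for fast sine/cosine lookups, with *_direct variants that skip the table. A sketch; the helper and table size are illustrative, and the normalisation of freq (assumed cycles per sample) is not documented here.

#include <complex.h>
#include "utils/nco.h"

/* Sketch: build a lookup table and synthesise a complex exponential,
 * e.g. for frequency shifting. */
void make_exponential(_Complex float *x, float freq, int len) {
    nco_t nco;
    nco_init(&nco, 1024);       /* larger tables give finer phase resolution */
    nco_cexp_f(&nco, x, freq, len);
    nco_destroy(&nco);
}
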
+ */ + +#ifndef PACK_ +#define PACK_ + +unsigned int unpack_bits(char **bits, int nof_bits); +void pack_bits(unsigned int value, char **bits, int nof_bits); + +#endif diff --git a/include/utils/vector.h b/include/utils/vector.h new file mode 100644 index 000000000..a236fcaf0 --- /dev/null +++ b/include/utils/vector.h @@ -0,0 +1,44 @@ +/* + * Copyright (c) 2013, Ismael Gomez-Miguelez . + * This file is part of OSLD-lib (http://https://github.com/ismagom/osld-lib) + * + * OSLD-lib is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * OSLD-lib is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with OSLD-lib. If not, see . + */ + +#ifndef VECTOR_ +#define VECTOR_ + +#include + +int sum_i(int *x, int len); +float sum_r(float *x, int len); +_Complex float sum_c(_Complex float *x, int len); + +void *vec_malloc(int size); +void vec_fprint_c(FILE *stream, _Complex float *x, int len); +void vec_fprint_f(FILE *stream, float *x, int len); +void vec_fprint_i(FILE *stream, int *x, int len); + +void vec_sum_ch(char *z, char *x, char *y, int len); +void vec_sum_c(_Complex float *z, _Complex float *x, _Complex float *y, int len); +void vec_mult_c_r(_Complex float *x,_Complex float *y, float h, int len); +void vec_mult_c(_Complex float *x,_Complex float *y, _Complex float h, int len); +void vec_conj(_Complex float *x, _Complex float *y, int len); +float vec_power(_Complex float *x, int len); +void vec_dot_prod(_Complex float *x,_Complex float *y, _Complex float *z, int len); +void vec_dot_prod_u(_Complex float *x,_Complex float *y, _Complex float *z, int len); +void vec_max(float *x, float *max, int *pos, int len); +void vec_abs(_Complex float *x, float *abs, int len); + +#endif diff --git a/lib/CMakeLists.txt b/lib/CMakeLists.txt new file mode 100644 index 000000000..fb0c9f526 --- /dev/null +++ b/lib/CMakeLists.txt @@ -0,0 +1,28 @@ + + +file(GLOB modules *) + +SET(SOURCES_ALL "") +foreach (_module ${modules}) + if (IS_DIRECTORY ${_module}) + file(GLOB_RECURSE tmp "${_module}/src/*.c") + LIST(APPEND SOURCES_ALL ${tmp}) + endif() +endforeach() + + +include_directories(${CMAKE_CURRENT_SOURCE_DIR}/../include) + + +add_library(osld ${SOURCES_ALL}) + + + + + + + + + + + diff --git a/lib/ch_estimation/src/chest.c b/lib/ch_estimation/src/chest.c new file mode 100644 index 000000000..6f4ac002c --- /dev/null +++ b/lib/ch_estimation/src/chest.c @@ -0,0 +1,279 @@ +/* + * Copyright (c) 2013, Ismael Gomez-Miguelez . + * This file is part of OSLD-lib (http://https://github.com/ismagom/osld-lib) + * + * OSLD-lib is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * OSLD-lib is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. 
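
The vector helpers above cover the common magnitude-plus-peak-search pattern used after a correlation. A sketch; the helper is illustrative, and abs_buf is caller-provided scratch space of len floats.

#include <complex.h>
#include "utils/vector.h"

int peak_search(_Complex float *corr, float *abs_buf, int len, float *peak_value) {
    int peak_pos;
    vec_abs(corr, abs_buf, len);                  /* |corr[i]| for each sample */
    vec_max(abs_buf, peak_value, &peak_pos, len); /* value and position of the maximum */
    return peak_pos;
}
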
+ * + * You should have received a copy of the GNU Lesser General Public License + * along with OSLD-lib. If not, see . + */ + +#include +#include +#include +#include +#include + +#include "ch_estimation/chest.h" +#include "resampling/interp.h" +#include "utils/vector.h" +#include "utils/debug.h" + +#define SLOT_SZ(q) (q->nof_symbols * q->symbol_sz) +#define SF_SZ(q) (2 * SLOT_SZ(q)) + +void chest_fprint(chest_t *q, FILE *stream, int nslot, int port_id) { + chest_ref_fprint(q, stream, nslot, port_id); + chest_recvsig_fprint(q, stream, nslot, port_id); + chest_ce_fprint(q, stream, nslot, port_id); +} + +void chest_ref_fprint(chest_t *q, FILE *stream, int nslot, int port_id) { + int i; + fprintf(stream, "refs=["); + for (i=0;irefsignal[port_id][nslot].nof_refs;i++) { + fprintf(stream, "%3.3f%+3.3fi, ", __real__ q->refsignal[port_id][nslot].refs[i].simbol, + __imag__ q->refsignal[port_id][nslot].refs[i].simbol); + } + fprintf(stream, "];\n"); +} + +void chest_recvsig_fprint(chest_t *q, FILE *stream, int nslot, int port_id) { + int i; + fprintf(stream, "recvsig=["); + for (i=0;irefsignal[port_id][nslot].nof_refs;i++) { + fprintf(stream, "%3.3f%+3.3fi, ", __real__ q->refsignal[port_id][nslot].refs[i].recv_simbol, + __imag__ q->refsignal[port_id][nslot].refs[i].recv_simbol); + } + fprintf(stream, "];\n"); +} + +void chest_ce_fprint(chest_t *q, FILE *stream, int nslot, int port_id) { + int i; + fprintf(stream, "mag=["); + for (i=0;irefsignal[port_id][nslot].nof_refs;i++) { + fprintf(stream, "%3.3f, ", cabsf(q->refsignal[port_id][nslot].ch_est[i])); + } + fprintf(stream, "];\nphase=["); + for (i=0;irefsignal[port_id][nslot].nof_refs;i++) { + fprintf(stream, "%3.3f, ", atan2f(__imag__ q->refsignal[port_id][nslot].ch_est[i], + __real__ q->refsignal[port_id][nslot].ch_est[i])); + } + fprintf(stream, "];\n"); +} + +void chest_ce_ref(chest_t *q, cf_t *input, int nslot, int port_id, int nref) { + int fidx, tidx; + cf_t known_ref, channel_ref; + + fidx = q->refsignal[port_id][nslot].refs[nref].freq_idx; // reference frequency index + tidx = q->refsignal[port_id][nslot].refs[nref].time_idx; // reference time index + known_ref = q->refsignal[port_id][nslot].refs[nref].simbol; + channel_ref = input[SAMPLE_IDX(q->symbol_sz, tidx, fidx)]; + q->refsignal[port_id][nslot].refs[nref].recv_simbol = channel_ref; + /* FIXME: compare with treshold */ + if (channel_ref != 0) { + q->refsignal[port_id][nslot].ch_est[nref] = known_ref/channel_ref; + } else { + q->refsignal[port_id][nslot].ch_est[nref] = 0; + } +} + +/* Computes channel estimates for each reference in a slot and port. 
+ * Saves the nof_prb * 12 * nof_symbols channel estimates in the array ce + */ +void chest_ce_slot_port(chest_t *q, cf_t *input, cf_t *ce, int nslot, int port_id) { + int i, j; + cf_t x[2], y[MAX_NSYMB]; + + assert(nslot >= 0 && nslot < NSLOTS_X_FRAME); + assert(port_id >= 0 && port_id < q->nof_ports); + assert(q->refsignal[port_id][nslot].nsymbols <= 2); + + refsignal_t *r = &q->refsignal[port_id][nslot]; + + INFO("Estimating channel using %d reference signals\n", r->nof_refs); + + for (i=0;inof_refs;i++) { + chest_ce_ref(q, input, nslot, port_id, i); + } + + /* interpolate the symbols with references + * in the freq domain */ + for (i=0;insymbols;i++) { + interp_linear_offset(&r->ch_est[i * r->nof_refs/2], + &ce[r->symbols_ref[i] * q->nof_prb * RE_X_RB], RE_X_RB/2, + r->nof_refs/2, r->voffset, RE_X_RB/2-r->voffset); + + } + /* now interpolate in the time domain */ + for (i=0;inof_prb * RE_X_RB; i++) { + for (j=0;jnsymbols;j++) { + x[j] = ce[r->symbols_ref[j] * q->nof_prb * RE_X_RB + i]; + printf("x[%d]=ce[%d]=%.3f\n", j, + r->symbols_ref[j] * q->nof_prb * RE_X_RB + i, + cabsf(x[j])); + } + interp_linear_offset(x, y, r->symbols_ref[1]-r->symbols_ref[0], + 2, r->symbols_ref[0], 3); + for (j=0;jnof_symbols;j++) { + printf("ce[%d] = y[%d] =%.3f\n", j * q->nof_prb * RE_X_RB + i, j, cabsf(x[j])); + ce[j * q->nof_prb * RE_X_RB + i] = y[j]; + } + } +} + +/* Computes channel estimates for each reference in a slot. + * Saves the result for the p-th port to the pointer ce[p] + */ +void chest_ce_slot(chest_t *q, cf_t *input, cf_t **ce, int nslot) { + int p; + for (p=0;pnof_ports;p++) { + chest_ce_slot_port(q, input, ce[p], nslot, p); + } +} + +int chest_init(chest_t *q, lte_cp_t cp, int nof_prb, int nof_ports) { + + if (nof_ports > MAX_PORTS) { + fprintf(stderr, "Error: Maximum ports %d\n", MAX_PORTS); + return -1; + } + bzero(q, sizeof(chest_t)); + + q->nof_ports = nof_ports; + q->nof_symbols = CP_NSYMB(cp); + q->symbol_sz = lte_symbol_sz(nof_prb); + q->cp = cp; + q->nof_prb = nof_prb; + + INFO("Initializing channel estimator size %dx%d nof_prb=%d, nof_ports=%d\n", + q->nof_symbols, q->symbol_sz, nof_prb, nof_ports); + + return 0; +} + +int chest_ref_LTEDL_slot_port(chest_t *q, int port, int nslot, int cell_id) { + if (port < 0 || port > q->nof_ports) { + return -1; + } + if (nslot < 0 || nslot > NSLOTS_X_FRAME) { + return -1; + } + + INFO("Setting LTE DL reference signals port=%d, nslot=%d, cell_id=%d\n", port, nslot, cell_id); + + if (refsignal_init_LTEDL(&q->refsignal[port][nslot], port, nslot, cell_id, q->cp, q->nof_prb)) { + fprintf(stderr, "Error initiating CRS port=%d, slot=%d\n", port, nslot); + return -1; + } + + return 0; +} + +int chest_ref_LTEDL_slot(chest_t *q, int nslot, int cell_id) { + int p; + for (p=0;pnof_ports;p++) { + if (chest_ref_LTEDL_slot_port(q, p, nslot, cell_id)) { + return -1; + } + } + return 0; +} + +int chest_ref_LTEDL(chest_t *q, int cell_id) { + int n; + for (n=0;nnof_ports;p++) { + for (n=0;nrefsignal[p][n]); + } + } + bzero(q, sizeof(chest_t)); +} + +/* Fills l[2] with the symbols in the slot nslot that contain references. 
+ * returns the number of symbols with references (in the slot) + */ +int chest_ref_symbols(chest_t *q, int port_id, int nslot, int l[2]) { + if (nslot < 0 || nslot > NSLOTS_X_FRAME) { + return -1; + } + memcpy(l, q->refsignal[port_id][nslot].symbols_ref, sizeof(int) * q->refsignal[port_id][nslot].nsymbols); + return q->refsignal[port_id][nslot].nsymbols; +} + + +/** High-level API +*/ +int chest_initialize(chest_hl* h) { + + if (!h->init.ntime) { + h->init.ntime = 7; + } + if (!h->init.nfreq) { + h->init.nfreq = 10; + } + if (!h->init.nof_symbols) { + h->init.nof_symbols = CPNORM_NSYMB; // Normal CP + } + if (!h->init.port_id) { + h->init.port_id = 0; + } + if (!h->init.cell_id) { + h->init.cell_id = 0; + } + if (!h->init.nof_prb) { + h->init.nof_prb = 6; + } + +/* if (chest_LTEDL_init(&h->obj, h->init.ntime, h->init.nfreq, + h->init.nof_symbols==CPNORM_NSYMB, h->init.cell_id, h->init.nof_prb)) { + return -1; + } +*/ + return 0; +} + +/** This function can be called in a subframe (1ms) or slot basis (0.5ms) for LTE */ +int chest_work(chest_hl* hl) { + chest_t *q = &hl->obj; + /* + if (hl->in_len == SF_SZ(q)) { + *hl->out_len = chest_LTEDL_run_sf(q, hl->input, hl->output, hl->ctrl_in.slot_id/2); + } else if (hl->in_len == SLOT_SZ(q)) { + *hl->out_len = chest_LTEDL_run_slot(q, hl->input, hl->output, hl->ctrl_in.slot_id); + } + */ + + if (*hl->out_len < 0) { + return -1; + } else { + return 0; + } + +} + +int chest_stop(chest_hl* hl) { + chest_free(&hl->obj); + return 0; +} + + diff --git a/lib/ch_estimation/src/refsignal.c b/lib/ch_estimation/src/refsignal.c new file mode 100644 index 000000000..f79cfe59a --- /dev/null +++ b/lib/ch_estimation/src/refsignal.c @@ -0,0 +1,178 @@ +/* + * Copyright (c) 2013, Ismael Gomez-Miguelez . + * This file is part of OSLD-lib (http://https://github.com/ismagom/osld-lib) + * + * OSLD-lib is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * OSLD-lib is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with OSLD-lib. If not, see . 
+ */ + +#include +#include +#include +#include + +#include "lte/base.h" +#include "ch_estimation/refsignal.h" +#include "utils/vector.h" +#include "utils/debug.h" +#include "lte/sequence.h" + +#define idx(x, y) (l*nof_refs_x_symbol+i) + +int refsignal_v(int port_id, int ns, int symbol_id) { + int v=-1; + switch(port_id) { + case 0: + if (symbol_id == 0) { + v=0; + } else { + v=3; + } + break; + case 1: + if (symbol_id == 0) { + v=3; + } else { + v=0; + } + break; + case 2: + v=3*(ns%2); + break; + case 3: + v=3+3*(ns%2); + break; + } + return v; +} + +int refsignal_k(int m, int v, int cell_id) { + return 6*m+((v+(cell_id%6))%6); +} + +/** Initializes refsignal_t object according to 3GPP 36.211 6.10.1 + * + */ +int refsignal_init_LTEDL(refsignal_t *q, int port_id, int nslot, + int cell_id, lte_cp_t cp, int nof_prb) { + + unsigned int c_init; + int ns, l, lp[2]; + int N_cp; + int i; + int ret = -1; + sequence_t seq; + int v; + int mp; + int nof_refs_x_symbol, nof_ref_symbols; + + bzero(q, sizeof(refsignal_t)); + bzero(&seq, sizeof(sequence_t)); + + if (CP_ISNORM(cp)) { + N_cp = 1; + } else { + N_cp = 0; + } + + if (port_id < 0 || port_id > (MAX_PORTS - 1)) { + fprintf(stderr, "Invalid port id %d\n", port_id); + return -1; + } + + if (port_id < 2) { + nof_ref_symbols = 2; + lp[0] = 0; + lp[1] = CP_NSYMB(cp) - 3; + } else { + nof_ref_symbols = 1; + lp[0] = 1; + } + nof_refs_x_symbol = 2 * nof_prb; + + q->nof_refs = nof_refs_x_symbol * nof_ref_symbols; + q->nsymbols = nof_ref_symbols; + q->symbols_ref = malloc(sizeof(int) * nof_ref_symbols); + q->voffset = cell_id%6; + if (!q->symbols_ref) { + return -1; + } + + memcpy(q->symbols_ref, lp, sizeof(int) * nof_ref_symbols); + + DEBUG("Initializing %d CRS for LTE DL slot=%d, %d RE in %d symbols\n", + q->nof_refs, nslot, nof_refs_x_symbol, nof_ref_symbols); + + q->refs = vec_malloc(q->nof_refs * sizeof(ref_t)); + if (!q->refs) { + goto free_and_exit; + } + q->ch_est = vec_malloc(q->nof_refs * sizeof(cf_t)); + if (!q->ch_est) { + goto free_and_exit; + } + + ns = nslot; + for (l = 0; l < nof_ref_symbols; l++) { + + c_init = 1024 * (7 * (ns + 1) + lp[l] + 1) * (2 * cell_id + 1) + + 2 * cell_id + N_cp; + + if (sequence_LTEPRS(&seq, 2 * 2 * MAX_PRB, c_init)) { + goto free_and_exit; + } + + v = refsignal_v(port_id, ns, lp[l]); + + for (i = 0; i < nof_refs_x_symbol; i++) { + mp = i + MAX_PRB - nof_prb; + + /* generate signal */ + __real__ q->refs[idx(l,i)].simbol = (1 - 2 * (float) seq.c[2 * mp]) / sqrt(2); + __imag__ q->refs[idx(l,i)].simbol = (1 - 2 * (float) seq.c[2 * mp + 1]) / sqrt(2); + + /* mapping to resource elements */ + q->refs[idx(l,i)].freq_idx = refsignal_k(i, v, cell_id)+GUARD_RE(nof_prb); + q->refs[idx(l,i)].time_idx = lp[l]; + + /* print only first slot */ + if (ns == 0) { + DEBUG("(%-2d,%2d) is mapped to (%-2d,%2d) (mp=%d, v=%d)\n", + l,i,q->refs[idx(l,i)].time_idx, q->refs[idx(l,i)].freq_idx-GUARD_RE(nof_prb), mp, v); + } + } + } + + ret = 0; +free_and_exit: + sequence_free(&seq); + if (ret == -1) { + refsignal_free(q); + } + return ret; +} + +void refsignal_free(refsignal_t *q) { + if (q->symbols_ref) { + free(q->symbols_ref); + } + if (q->refs) { + free(q->refs); + } + if (q->ch_est) { + free(q->ch_est); + } + bzero(q, sizeof(refsignal_t)); +} + + diff --git a/lib/channel/src/ch_awgn.c b/lib/channel/src/ch_awgn.c new file mode 100644 index 000000000..7509c8b21 --- /dev/null +++ b/lib/channel/src/ch_awgn.c @@ -0,0 +1,50 @@ +/* + * Copyright (c) 2013, Ismael Gomez-Miguelez . 
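
The mapping in refsignal_k() above places the m-th reference of a symbol every six subcarriers, shifted by the port-dependent offset v plus cell_id % 6. For cell_id = 1 and v = 0 the pilots therefore fall on subcarriers 1, 7, 13, ... A self-contained restatement, for illustration only:

#include <stdio.h>

/* Local restatement of the refsignal_k() mapping. */
static int ref_k(int m, int v, int cell_id) {
    return 6 * m + ((v + (cell_id % 6)) % 6);
}

int main() {
    /* cell_id = 1, v = 0: prints 1 7 13 19 */
    for (int m = 0; m < 4; m++) {
        printf("%d ", ref_k(m, 0, 1));
    }
    printf("\n");
    return 0;
}
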
+ * This file is part of OSLD-lib (http://https://github.com/ismagom/osld-lib) + * + * OSLD-lib is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * OSLD-lib is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with OSLD-lib. If not, see . + */ + +#include +#include +#include + +#include "gauss.h" +#include "channel/ch_awgn.h" + +void ch_awgn(const cf* x, cf* y, float variance, int buff_sz) { + _Complex float tmp; + int i; + + for (i=0;iinput,hl->output,hl->ctrl_in.variance,hl->in_len); + if (hl->out_len) { + *hl->out_len = hl->in_len; + } + return 0; +} diff --git a/lib/channel/src/gauss.c b/lib/channel/src/gauss.c new file mode 100644 index 000000000..efdd090c0 --- /dev/null +++ b/lib/channel/src/gauss.c @@ -0,0 +1,37 @@ +/* + * Copyright (c) 2013, Ismael Gomez-Miguelez . + * This file is part of OSLD-lib (http://https://github.com/ismagom/osld-lib) + * + * OSLD-lib is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * OSLD-lib is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with OSLD-lib. If not, see . + */ + +#include +#include +#include + +float rand_gauss (void) { + float v1,v2,s; + + do { + v1 = 2.0 * ((float) rand()/RAND_MAX) - 1; + v2 = 2.0 * ((float) rand()/RAND_MAX) - 1; + + s = v1*v1 + v2*v2; + } while ( s >= 1.0 ); + + if (s == 0.0) + return 0.0; + else + return (v1*sqrt(-2.0 * log(s) / s)); +} diff --git a/lib/channel/src/gauss.h b/lib/channel/src/gauss.h new file mode 100644 index 000000000..2cb14b179 --- /dev/null +++ b/lib/channel/src/gauss.h @@ -0,0 +1,19 @@ +/* + * Copyright (c) 2013, Ismael Gomez-Miguelez . + * This file is part of OSLD-lib (http://https://github.com/ismagom/osld-lib) + * + * OSLD-lib is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * OSLD-lib is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with OSLD-lib. If not, see . + */ + +float rand_gauss (void); diff --git a/lib/fec/src/convcoder.c b/lib/fec/src/convcoder.c new file mode 100644 index 000000000..ddc992ca9 --- /dev/null +++ b/lib/fec/src/convcoder.c @@ -0,0 +1,125 @@ +/* + * Copyright (c) 2013, Ismael Gomez-Miguelez . 
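rand_gauss() above is the Marsaglia polar method and returns a zero-mean, unit-variance sample; ch_awgn() draws one such sample per real dimension of the noise. A tiny illustrative helper built on it (the function name is ours, not part of the patch):

/* Draw one zero-mean complex Gaussian sample with standard deviation sigma
 * per real dimension, using rand_gauss() (illustrative sketch). */
_Complex float rand_cgauss(float sigma) {
    _Complex float z;
    __real__ z = sigma * rand_gauss();
    __imag__ z = sigma * rand_gauss();
    return z;
}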
+ * This file is part of OSLD-lib (http://https://github.com/ismagom/osld-lib) + * + * OSLD-lib is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * OSLD-lib is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with OSLD-lib. If not, see . + */ + +/**@TODO frontend to FEC library if installed + */ +#include +#include +#include + +#include "fec/convcoder.h" +#include "parity.h" +#include "viterbi37.h" + +#define DEB 0 + +int decode37(void *o, float *symbols, char *data) { + viterbi_t *q = o; + int i; + int len = q->tail_biting ? q->framebits : (q->framebits + q->K - 1); + float amp = 0; + + for (i=0;i<3*len;i++) { + if (fabsf(symbols[i] > amp)) { + amp = symbols[i]; + } + } + + /* Decode it and make sure we get the right answer */ + /* Initialize Viterbi decoder */ + init_viterbi37_port(q->ptr, q->tail_biting?-1:0); + + /* Decode block */ + update_viterbi37_blk_port(q->ptr, symbols,q->framebits + q->K - 1, amp, len); + + /* Do Viterbi chainback */ + chainback_viterbi37_port(q->ptr, data, q->framebits, 0); + + return q->framebits; +} + +void free37(void *o) { + viterbi_t *q = o; + delete_viterbi37_port(q->ptr); +} + +int init37(viterbi_t *q, int poly[3], int framebits, bool tail_biting) { + q->K = 7; + q->R = 3; + q->framebits = framebits; + q->tail_biting = tail_biting; + q->decode = decode37; + q->free = free37; + + if ((q->ptr = create_viterbi37_port(poly, framebits, tail_biting)) == NULL) { + fprintf(stderr, "create_viterbi37 failed\n"); + return -1; + } else { + return 0; + } +} + +int viterbi_init(viterbi_t *q, viterbi_type_t type, int poly[3], int framebits, bool tail_bitting) { + switch(type) { + case CONVCODER_37: + return init37(q, poly, framebits, tail_bitting); + default: + fprintf(stderr, "Decoder not implemented\n"); + return -1; + } +} + +void viterbi_free(viterbi_t *q) { + q->free(q); +} + +/* symbols are real-valued */ +int viterbi_decode(viterbi_t *q, float *symbols, char *data) { + return q->decode(q, symbols, data); +} + + +int conv_encode(convcoder_t *q, char *input, char *output) { + unsigned int sr; + int i,j; + int len = q->tail_biting ? q->framelength : (q->framelength + q->K - 1); + + if (q->tail_biting) { + sr = 0; + for (i=q->framelength - q->K + 1; iframelength; i++) { + if (DEB) printf("%3d: sr=%3d, bit=%d\n",i,sr&7,input[i]); + sr = (sr << 1) | (input[i] & 1); + } + } else { + sr = 0; + } + + if (DEB) printf("state st=%d\n",sr&7); + for (i = 0; i < len; i++) { + int bit = (i < q->framelength) ? (input[i] & 1) : 0; + sr = (sr << 1) | bit; + if (DEB) printf("%d, ",input[i]); + for (j=0;jR;j++) { + output[q->R * i + j] = parity(sr & q->poly[j]); + } + } + if (DEB) printf("\n"); + if (DEB) printf("state fin=%u\n",sr&7); + return q->R*len; +} + diff --git a/lib/fec/src/crc.c b/lib/fec/src/crc.c new file mode 100644 index 000000000..a10475a78 --- /dev/null +++ b/lib/fec/src/crc.c @@ -0,0 +1,78 @@ +/* + * Copyright (c) 2012, Ismael Gomez-Miguelez . 
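To make the convcoder/viterbi API above concrete, a sketch of a noiseless tail-biting rate-1/3, K=7 round trip. The masks 0x6D/0x4F/0x57 are the 36.212 generators 133/171/165 (octal) written for the newest-bit-in-LSB shift register that conv_encode() uses; the BPSK mapping of coded bits to +/-1.0 soft symbols and the assumption that convcoder_t exposes an int poly[3] member are ours.

/* Encode 40 bits with the LTE tail-biting convolutional code and decode them back (sketch). */
#define NBITS 40

int poly[3] = { 0x6D, 0x4F, 0x57 };      /* 133, 171, 165 octal, LSB = current input */
char tx_bits[NBITS], rx_bits[NBITS];
char coded[3 * NBITS];
float symbols[3 * NBITS];
convcoder_t enc;
viterbi_t dec;
int i;

for (i = 0; i < NBITS; i++) {
    tx_bits[i] = i & 1;                  /* arbitrary payload */
}
enc.K = 7; enc.R = 3; enc.framelength = NBITS; enc.tail_biting = true;
for (i = 0; i < 3; i++) {
    enc.poly[i] = poly[i];               /* assuming an int poly[3] member, as read by conv_encode() */
}
int nc = conv_encode(&enc, tx_bits, coded);
for (i = 0; i < nc; i++) {
    symbols[i] = coded[i] ? 1.0f : -1.0f;  /* noiseless BPSK for the example */
}
viterbi_init(&dec, CONVCODER_37, poly, NBITS, true);
viterbi_decode(&dec, symbols, rx_bits);    /* rx_bits should now equal tx_bits */
viterbi_free(&dec);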
+ * This file is part of ALOE++ (http://flexnets.upc.edu/) + * + * ALOE++ is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * ALOE++ is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with ALOE++. If not, see . + */ + + +unsigned int cword; + +unsigned int icrc1(unsigned int crc, unsigned short onech,int long_crc, + int left_shift,unsigned int poly) +{ + int i; + unsigned int tmp=(unsigned int) (crc ^ (onech << (long_crc >> 1) )); + + for (i=0;i>long_crc), + data,long_crc,i,poly)<>long_crc; + } + + ret=cword; + if (paste_word) { + cword<<=32-long_crc; + for (i=0;i>31); + cword<<=1; + } + } + return (ret); +} + + diff --git a/lib/fec/src/parity.h b/lib/fec/src/parity.h new file mode 100644 index 000000000..58cd67ab5 --- /dev/null +++ b/lib/fec/src/parity.h @@ -0,0 +1,36 @@ +/* User include file for libfec + * Copyright 2004, Phil Karn, KA9Q + * May be used under the terms of the GNU Lesser General Public License (LGPL) + */ + + +#ifdef __x86_64__ +#define __i386__ +#endif + +/* Determine parity of argument: 1 = odd, 0 = even */ +#ifdef __i386__ +static inline int parityb(unsigned char x){ + __asm__ __volatile__ ("test %1,%1;setpo %0" : "=g"(x) : "r" (x)); + return x; +} +#else +void partab_init(); + +static inline int parityb(unsigned char x){ + extern unsigned char Partab[256]; + extern int P_init; + if(!P_init){ + partab_init(); + } + return Partab[x]; +} +#endif + + +static inline int parity(int x){ + /* Fold down to one byte */ + x ^= (x >> 16); + x ^= (x >> 8); + return parityb(x); +} diff --git a/lib/fec/src/viterbi37.h b/lib/fec/src/viterbi37.h new file mode 100644 index 000000000..c91d30e2a --- /dev/null +++ b/lib/fec/src/viterbi37.h @@ -0,0 +1,7 @@ +#include + +void *create_viterbi37_port(int polys[3], int len, bool tail_biting); +int init_viterbi37_port(void *p, int starting_state); +int chainback_viterbi37_port(void *p, char *data, unsigned int nbits, unsigned int endstate); +void delete_viterbi37_port(void *p); +int update_viterbi37_blk_port(void *p, float *syms, int nbits, float amp, int framebits); diff --git a/lib/fec/src/viterbi37_port.c b/lib/fec/src/viterbi37_port.c new file mode 100644 index 000000000..e1467c3b6 --- /dev/null +++ b/lib/fec/src/viterbi37_port.c @@ -0,0 +1,230 @@ +/* K=9 r=1/3 Viterbi decoder in portable C + * Copyright Aug 2006, Phil Karn, KA9Q + * May be used under the terms of the GNU Lesser General Public License (LGPL) + */ +#include +#include +#include + +#include "parity.h" +#include "viterbi37.h" +#include "utils/debug.h" + +typedef union { + unsigned int w[64]; +} metric_t; +typedef union { + unsigned long w[2]; +} decision_t; + +static union { + unsigned char c[32]; +} Branchtab37[3]; + +#define DEB 0 + +/* State info for instance of Viterbi decoder */ +struct v37 { + metric_t metrics1; /* path metric buffer 1 */ + metric_t metrics2; /* path metric buffer 2 */ + decision_t *dp; /* Pointer to current decision */ + metric_t *old_metrics, *new_metrics; /* Pointers to path metrics, swapped on every bit */ + decision_t *decisions; /* Beginning of decisions for block */ +}; + +/* 
Initialize Viterbi decoder for start of new frame */ +int init_viterbi37_port(void *p, int starting_state) { + struct v37 *vp = p; + int i; + + if (p == NULL) + return -1; + + for (i = 0; i < 64; i++) + vp->metrics1.w[i] = 63; + + vp->old_metrics = &vp->metrics1; + vp->new_metrics = &vp->metrics2; + vp->dp = vp->decisions; + if (starting_state != -1) { + vp->old_metrics->w[starting_state & 63] = 0; /* Bias known start state */ + } + return 0; +} + +void set_viterbi37_polynomial_port(int polys[3]) { + int state; + + for (state = 0; state < 32; state++) { + Branchtab37[0].c[state] = + (polys[0] < 0) ^ parity((2 * state) & abs(polys[0])) ? 255 : 0; + Branchtab37[1].c[state] = + (polys[1] < 0) ^ parity((2 * state) & abs(polys[1])) ? 255 : 0; + Branchtab37[2].c[state] = + (polys[2] < 0) ^ parity((2 * state) & abs(polys[2])) ? 255 : 0; + } +} + +/* Create a new instance of a Viterbi decoder */ +void *create_viterbi37_port(int polys[3], int len, bool tail_biting) { + struct v37 *vp; + + set_viterbi37_polynomial_port(polys); + + if ((vp = (struct v37 *) malloc(sizeof(struct v37))) == NULL) + return NULL ; + + if ((vp->decisions = (decision_t *) malloc((len + 6) * sizeof(decision_t))) + == NULL) { + free(vp); + return NULL ; + } + init_viterbi37_port(vp, tail_biting?-1:0); + + return vp; +} + +/* Viterbi chainback */ +int chainback_viterbi37_port(void *p, char *data, /* Decoded output data */ + unsigned int nbits, /* Number of data bits */ + unsigned int endstate) { /* Terminal encoder state */ + + struct v37 *vp = p; + decision_t *d; + + if (p == NULL) + return -1; + + d = vp->decisions; + + /* Make room beyond the end of the encoder register so we can + * accumulate a full byte of decoded data + */ + + endstate=0; + /* The store into data[] only needs to be done every 8 bits. + * But this avoids a conditional branch, and the writes will + * combine in the cache anyway + */ + d += 6; /* Look past tail */ + while (nbits-- != 0) { + int k; + + k = (d[nbits].w[(endstate >> 2) / 32] >> ((endstate >> 2) % 32)) & 1; + endstate = (endstate >> 1) | (k << 7); + data[nbits] = k; + } + return 0; +} + +/* Delete instance of a Viterbi decoder */ +void delete_viterbi37_port(void *p) { + struct v37 *vp = p; + + if (vp != NULL) { + free(vp->decisions); + free(vp); + } +} + +/* C-language butterfly */ +#define BFLY(i) {\ +unsigned int metric,m0,m1,decision;\ + metric = (Branchtab37[0].c[i] ^ sym0) + (Branchtab37[1].c[i] ^ sym1) + \ + (Branchtab37[2].c[i] ^ sym2);\ + m0 = vp->old_metrics->w[i] + metric;\ + m1 = vp->old_metrics->w[i+32] + (765 - metric);\ + decision = (signed int)(m0-m1) > 0;\ + vp->new_metrics->w[2*i] = decision ? m1 : m0;\ + d->w[i/16] |= decision << ((2*i)&31);\ + m0 -= (metric+metric-765);\ + m1 += (metric+metric-765);\ + decision = (signed int)(m0-m1) > 0;\ + vp->new_metrics->w[2*i+1] = decision ? m1 : m0;\ + d->w[i/16] |= decision << ((2*i+1)&31);\ +} + +unsigned char tochar_clip(float sym, float amp) { + float ret = sym * (127.5 / amp) + 127.5; + if (ret > 255) { + ret = 255; + } + if (ret < 0) { + ret = 0; + } + return (unsigned char) ret; +} + +/* Update decoder with a block of demodulated symbols + * Note that nbits is the number of decoded data bits, not the number + * of symbols! 
+ */ + +int update_viterbi37_blk_port(void *p, float *syms, int nbits, float amp, int framebits) { + struct v37 *vp = p; + decision_t *d; + + if (p == NULL) + return -1; + int k=0; + d = (decision_t *) vp->dp; + + while (nbits--) { + void *tmp; + unsigned char sym0, sym1, sym2; + + d->w[0] = d->w[1] = 0; + + k++; + + if (k < framebits) { + sym0 = tochar_clip(*syms++, amp); + sym1 = tochar_clip(*syms++, amp); + sym2 = tochar_clip(*syms++, amp); + } else { + sym0=255; + sym1=255; + sym2=255; + } + + BFLY(0); + BFLY(1); + BFLY(2); + BFLY(3); + BFLY(4); + BFLY(5); + BFLY(6); + BFLY(7); + BFLY(8); + BFLY(9); + BFLY(10); + BFLY(11); + BFLY(12); + BFLY(13); + BFLY(14); + BFLY(15); + BFLY(16); + BFLY(17); + BFLY(18); + BFLY(19); + BFLY(20); + BFLY(21); + BFLY(22); + BFLY(23); + BFLY(24); + BFLY(25); + BFLY(26); + BFLY(27); + BFLY(28); + BFLY(29); + BFLY(30); + BFLY(31); + + d++; + tmp = vp->old_metrics; + vp->old_metrics = vp->new_metrics; + vp->new_metrics = tmp; + } + vp->dp = d; + return 0; +} diff --git a/lib/filter/src/filter2d.c b/lib/filter/src/filter2d.c new file mode 100644 index 000000000..8da3d4264 --- /dev/null +++ b/lib/filter/src/filter2d.c @@ -0,0 +1,145 @@ +/* + * Copyright (c) 2013, Ismael Gomez-Miguelez . + * This file is part of OSLD-lib (http://https://github.com/ismagom/osld-lib) + * + * OSLD-lib is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * OSLD-lib is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with OSLD-lib. If not, see . 
+ */ + +#include +#include +#include + +#include "utils/debug.h" + +#include "filter/filter2d.h" +#include "utils/matrix.h" +#include "utils/vector.h" +#include "utils/debug.h" + +/* Useful macros */ +#define intceil(X, Y) ((X-1)/Y+1) + +#define idx(a, b) ((a)*(q->szfreq)+b) + +int filter2d_init(filter2d_t* q, float **taps, int ntime, int nfreq, int sztime, + int szfreq) { + + int ret = -1; + bzero(q, sizeof(filter2d_t)); + + if (matrix_init((void***)&q->taps, ntime, nfreq, sizeof(float))) { + goto free_and_exit; + } + + matrix_copy((void**) q->taps, (void**) taps, ntime, nfreq, sizeof(float)); + + q->output = vec_malloc((ntime+sztime)*(szfreq)*sizeof(cf_t)); + if (!q->output) { + goto free_and_exit; + } + + bzero(q->output, (ntime+sztime)*(szfreq)*sizeof(cf_t)); + + q->nfreq = nfreq; + q->ntime = ntime; + q->szfreq = szfreq; + q->sztime = sztime; + + ret = 0; + + free_and_exit: if (ret == -1) { + filter2d_free(q); + } + return ret; +} + +void filter2d_free(filter2d_t *q) { + + matrix_free((void**) q->taps, q->ntime); + if (q->output) { + free(q->output); + } + bzero(q, sizeof(filter2d_t)); +} + +int filter2d_init_default(filter2d_t* q, int ntime, int nfreq, int sztime, + int szfreq) { + + int i, j; + int ret = -1; + float **taps; + + if (matrix_init((void***) &taps, ntime, nfreq, sizeof(float))) { + goto free_and_exit; + } + + /* Compute the default 2-D interpolation mesh */ + for (i = 0; i < ntime; i++) { + for (j = 0; j < nfreq; j++) { + if (j < nfreq / 2) + taps[i][j] = (j + 1.0) / (2.0 * intceil(nfreq, 2)); + + else if (j == nfreq / 2) + taps[i][j] = 0.5; + + else if (j > nfreq / 2) + taps[i][j] = (nfreq - j) / (2.0 * intceil(nfreq, 2)); + } + } + + INFO("Using default interpolation matrix of size %dx%d\n", ntime, nfreq); + if (verbose >= VERBOSE_DEBUG) { + matrix_fprintf_f(stdout, taps, ntime, nfreq); + } + + if (filter2d_init(q, taps, ntime, nfreq, sztime, szfreq)) { + goto free_and_exit; + } + + ret = 0; +free_and_exit: + matrix_free((void**) taps, ntime); + return ret; +} + +/* Moves the last ntime symbols to the start and clears the remaining of the output. + * Should be called, for instance, before filtering each OFDM frame. + */ +void filter2d_reset(filter2d_t *q) { + int i; + + for (i = 0; i < q->ntime; i++) { + memcpy(&q->output[idx(i,0)], &q->output[idx(q->sztime + i,0)], + sizeof(cf_t) * (q->szfreq)); + } + for (; i < q->ntime + q->sztime; i++) { + memset(&q->output[idx(i,0)], 0, sizeof(cf_t) * (q->szfreq)); + } +} + +/** Adds samples x to the from the given time/freq indexes to the filter + * and computes the output. + */ +void filter2d_add(filter2d_t *q, cf_t x, int time_idx, int freq_idx) { + int i, j; + + int ntime = q->ntime; + int nfreq = q->nfreq; + + for (i = 0; i < ntime; i++) { + for (j = 0; j < nfreq; j++) { + q->output[idx(i+time_idx, j+freq_idx - nfreq/2)] += x * (cf_t)(q->taps[i][j]); + } + } +} diff --git a/lib/io/src/binsource.c b/lib/io/src/binsource.c new file mode 100644 index 000000000..88baa3e4f --- /dev/null +++ b/lib/io/src/binsource.c @@ -0,0 +1,172 @@ +/* + * Copyright (c) 2013, Ismael Gomez-Miguelez . + * This file is part of OSLD-lib (http://https://github.com/ismagom/osld-lib) + * + * OSLD-lib is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. 
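filter2d above acts as a 2-D interpolator: every known sample is spread over an ntime x nfreq neighbourhood using the triangular taps built by filter2d_init_default(). A short usage sketch; the grid sizes and the pilot value are example choices of ours:

/* Spread pilot estimates over a 7-symbol x 72-subcarrier slot grid (sketch). */
filter2d_t interp;
cf_t h_pilot = 1.0;                        /* channel estimate at one pilot position  */
int l = 0, k = 6;                          /* OFDM symbol and subcarrier of the pilot */

if (filter2d_init_default(&interp, 7, 12, 7, 72)) {
    /* handle error */
}
filter2d_reset(&interp);                   /* call once per new slot/frame            */
filter2d_add(&interp, h_pilot, l, k);      /* accumulates h_pilot * taps around (l,k) */
/* repeat filter2d_add() for every pilot; the interpolated grid is then
 * read from interp.output[l * 72 + k] */
filter2d_free(&interp);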
+ * + * OSLD-lib is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with OSLD-lib. If not, see . + */ + +#include +#include +#include + +#include "io/binsource.h" +#include "utils/bit.h" + +#define DIV(a,b) ((a-1)/b+1) + + +/* Internal functions */ +static int gen_seq_buff(binsource_t* q, int nwords) { + if (q->seq_buff_nwords != nwords) { + free(q->seq_buff); + q->seq_buff_nwords = 0; + } + if (!q->seq_buff_nwords) { + q->seq_buff = malloc(nwords*sizeof(uint32_t)); + if (!q->seq_buff) { + return -1; + } + q->seq_buff_nwords = nwords; + } + for (int i=0;iseq_buff_nwords;i++) { + q->seq_buff[i] = rand_r(&q->seed); + } + return 0; +} + +/* Low-level API */ + +/** + * Initializes the binsource object. + */ +void binsource_init(binsource_t* q) { + bzero((void*) q,sizeof(binsource_t)); +} + +/** + * Destroys binsource object + */ +void binsource_free(binsource_t* q) { + if (q->seq_buff) { + free(q->seq_buff); + } + bzero(q, sizeof(binsource_t)); +} + +/** + * Sets a new seed + */ +void binsource_seed_set(binsource_t* q, unsigned int seed) { + q->seed = seed; +} + +/** + * Sets local time as seed. + */ +void binsource_seed_time(binsource_t *q) { + struct timeval t1; + gettimeofday(&t1, NULL); + q->seed = t1.tv_usec * t1.tv_sec; +} + +/** + * Generates a sequence of nbits random bits + */ +int binsource_cache_gen(binsource_t* q, int nbits) { + if (gen_seq_buff(q,DIV(nbits,32))) { + return -1; + } + q->seq_cache_nbits = nbits; + q->seq_cache_rp = 0; + return 0; +} + +static int int_2_bits(uint32_t* src, char* dst, int nbits) { + int n; + n=nbits/32; + for (int i=0;iseq_cache_rp += int_2_bits(&q->seq_buff[q->seq_cache_rp],bits,nbits); +} + +/** + * Stores in the bits buffer a sequence of nbits pseudo-random bits. + * Overwrites the bits generated using binsource_cache_gen. 
+ */ +int binsource_generate(binsource_t* q, char *bits, int nbits) { + + if (gen_seq_buff(q,DIV(nbits,32))) { + return -1; + } + int_2_bits(q->seq_buff,bits,nbits); + return 0; +} + + + + + +/* High-Level API */ +int binsource_initialize(binsource_hl* hl) { + + binsource_init(&hl->obj); + if (hl->init.seed) { + binsource_seed_set(&hl->obj,hl->init.seed); + } else { + binsource_seed_time(&hl->obj); + } + + if (hl->init.cache_seq_nbits) { + if (binsource_cache_gen(&hl->obj,hl->init.cache_seq_nbits)) { + return -1; + } + } + + return 0; +} + + +int binsource_work(binsource_hl* hl) { + int ret = -1; + + if (hl->init.cache_seq_nbits) { + binsource_cache_cpy(&hl->obj,hl->output,hl->ctrl_in.nbits); + ret = 0; + } else { + ret = binsource_generate(&hl->obj,hl->output,hl->ctrl_in.nbits); + } + if (hl->out_len) { + if (!ret) { + *hl->out_len = hl->ctrl_in.nbits; + } else { + *hl->out_len = 0; + } + } + return ret; +} + +int binsource_stop(binsource_hl* hl) { + binsource_free(&hl->obj); + return 0; +} + diff --git a/lib/io/src/filesink.c b/lib/io/src/filesink.c new file mode 100644 index 000000000..ac128dfe1 --- /dev/null +++ b/lib/io/src/filesink.c @@ -0,0 +1,92 @@ + +#include +#include +#include +#include + + +#include "io/filesink.h" + +int filesink_init(filesink_t *q, char *filename, file_data_type_t type) { + bzero(q, sizeof(filesink_t)); + q->f = fopen(filename, "w"); + if (!q->f) { + perror("fopen"); + return -1; + } + q->type = type; + return 0; +} + +void filesink_close(filesink_t *q) { + if (q->f) { + fclose(q->f); + } + bzero(q, sizeof(filesink_t)); +} + +int filesink_write(filesink_t *q, void *buffer, int nsamples) { + int i; + float *fbuf = (float*) buffer; + _Complex float *cbuf = (_Complex float*) buffer; + _Complex short *sbuf = (_Complex short*) buffer; + int size; + + switch(q->type) { + case FLOAT: + for (i=0;if,"%g\n",fbuf[i]); + } + break; + case COMPLEX_FLOAT: + for (i=0;i= 0) + fprintf(q->f,"%g+%gi\n",__real__ cbuf[i],__imag__ cbuf[i]); + else + fprintf(q->f,"%g-%gi\n",__real__ cbuf[i],fabsf(__imag__ cbuf[i])); + } + break; + case COMPLEX_SHORT: + for (i=0;i= 0) + fprintf(q->f,"%hd+%hdi\n",__real__ sbuf[i],__imag__ sbuf[i]); + else + fprintf(q->f,"%hd-%hdi\n",__real__ sbuf[i],(short) abs(__imag__ sbuf[i])); + } + break; + case FLOAT_BIN: + case COMPLEX_FLOAT_BIN: + case COMPLEX_SHORT_BIN: + if (q->type == FLOAT_BIN) { + size = sizeof(float); + } else if (q->type == COMPLEX_FLOAT_BIN) { + size = sizeof(_Complex float); + } else if (q->type == COMPLEX_SHORT_BIN) { + size = sizeof(_Complex short); + } + return fwrite(buffer, size, nsamples, q->f); + break; + default: + i = -1; + break; + } + return i; +} + + + +int filesink_initialize(filesink_hl* h) { + return filesink_init(&h->obj, h->init.file_name, h->init.data_type); +} + +int filesink_work(filesink_hl* h) { + if (filesink_write(&h->obj, h->input, h->in_len)<0) { + return -1; + } + return 0; +} + +int filesink_stop(filesink_hl* h) { + filesink_close(&h->obj); + return 0; +} diff --git a/lib/io/src/filesource.c b/lib/io/src/filesource.c new file mode 100644 index 000000000..6677d7eae --- /dev/null +++ b/lib/io/src/filesource.c @@ -0,0 +1,105 @@ + +#include +#include +#include + +#include "io/filesource.h" + +int filesource_init(filesource_t *q, char *filename, file_data_type_t type) { + bzero(q, sizeof(filesource_t)); + q->f = fopen(filename, "r"); + if (!q->f) { + perror("fopen"); + return -1; + } + q->type = type; + return 0; +} + +void filesource_close(filesource_t *q) { + if (q->f) { + fclose(q->f); + } + bzero(q, 
sizeof(filesource_t)); +} + +int read_complex_f(FILE *f, _Complex float *y) { + char in_str[64]; + _Complex float x; + if (NULL == fgets(in_str, 64, f)) { + return -1; + } else { + if (index(in_str, 'i') || index(in_str, 'j')) { + sscanf(in_str,"%f%fi",&(__real__ x),&(__imag__ x)); + } else { + __imag__ x = 0; + sscanf(in_str,"%f",&(__real__ x)); + } + *y = x; + return 0; + } +} + +int filesource_read(filesource_t *q, void *buffer, int nsamples) { + int i; + float *fbuf = (float*) buffer; + _Complex float *cbuf = (_Complex float*) buffer; + _Complex short *sbuf = (_Complex short*) buffer; + int size; + + switch(q->type) { + case FLOAT: + for (i=0;if,"%g\n",&fbuf[i])) + break; + } + break; + case COMPLEX_FLOAT: + for (i=0;if, &cbuf[i])) { + break; + } + } + break; + case COMPLEX_SHORT: + for (i=0;if,"%hd%hdi\n",&(__real__ sbuf[i]),&(__imag__ sbuf[i]))) + break; + } + break; + case FLOAT_BIN: + case COMPLEX_FLOAT_BIN: + case COMPLEX_SHORT_BIN: + if (q->type == FLOAT_BIN) { + size = sizeof(float); + } else if (q->type == COMPLEX_FLOAT_BIN) { + size = sizeof(_Complex float); + } else if (q->type == COMPLEX_SHORT_BIN) { + size = sizeof(_Complex short); + } + return fread(buffer, size, nsamples, q->f); + break; + default: + i = -1; + break; + } + return i; +} + + +int filesource_initialize(filesource_hl* h) { + return filesource_init(&h->obj, h->init.file_name, h->init.data_type); +} + +int filesource_work(filesource_hl* h) { + *h->out_len = filesource_read(&h->obj, h->output, h->ctrl_in.nsamples); + if (*h->out_len < 0) { + return -1; + } + return 0; +} + +int filesource_stop(filesource_hl* h) { + filesource_close(&h->obj); + return 0; +} diff --git a/lib/lte/src/fft.c b/lib/lte/src/fft.c new file mode 100644 index 000000000..5195140f6 --- /dev/null +++ b/lib/lte/src/fft.c @@ -0,0 +1,73 @@ +/* + * Copyright (c) 2013, Ismael Gomez-Miguelez . + * This file is part of OSLD-lib (http://https://github.com/ismagom/osld-lib) + * + * OSLD-lib is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * OSLD-lib is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with OSLD-lib. If not, see . + */ +#include +#include + +#include "lte/base.h" +#include "utils/dft.h" +#include "lte/fft.h" + +int lte_fft_init_(lte_fft_t *q, lte_cp_t cp_type, int symbol_sz, dft_dir_t dir) { + + if (dft_plan_c2c(symbol_sz, dir, &q->fft_plan)) { + return -1; + } + q->fft_plan.options = DFT_DC_OFFSET | DFT_MIRROR_POS | DFT_NORMALIZE; + q->nof_symbols = CP_NSYMB(cp_type); + q->symbol_sz = symbol_sz; + q->cp_type = cp_type; + return 0; +} +void lte_fft_free_(lte_fft_t *q) { + dft_plan_free(&q->fft_plan); + bzero(q, sizeof(lte_fft_t)); +} + +int lte_fft_init(lte_fft_t *q, lte_cp_t cp_type, int symbol_sz) { + return lte_fft_init_(q, cp_type, symbol_sz, FORWARD); +} +void lte_fft_free(lte_fft_t *q) { + lte_fft_free_(q); +} +int lte_ifft_init(lte_fft_t *q, lte_cp_t cp_type, int symbol_sz) { + return lte_fft_init_(q, cp_type, symbol_sz, BACKWARD); +} +void lte_ifft_free(lte_fft_t *q) { + lte_fft_free_(q); +} + +/* Transforms input samples into output OFDM symbols. 
+ * Performs FFT on a each symbol and removes CP. + */ +void lte_fft_run(lte_fft_t *q, cf_t *input, cf_t *output) { + int i; + for (i=0;inof_symbols;i++) { + input += CP_ISNORM(q->cp_type)?CP_NORM(i, q->symbol_sz):CP_EXT(q->symbol_sz); + dft_run_c2c(&q->fft_plan, input, output); + input += q->symbol_sz; + output += q->symbol_sz; + } +} + +/* Transforms input OFDM symbols into output samples. + * Performs FFT on a each symbol and adds CP. + */ +void lte_ifft_run(lte_fft_t *q, cf_t *input, cf_t *output) { + fprintf(stderr, "Error: Not implemented\n"); +} + diff --git a/lib/lte/src/lte.c b/lib/lte/src/lte.c new file mode 100644 index 000000000..a80914278 --- /dev/null +++ b/lib/lte/src/lte.c @@ -0,0 +1,203 @@ +/* + * Copyright (c) 2013, Ismael Gomez-Miguelez . + * This file is part of OSLD-lib (http://https://github.com/ismagom/osld-lib) + * + * OSLD-lib is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * OSLD-lib is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with OSLD-lib. If not, see . + */ + + +#include +#include + +#include "lte/base.h" + +const int lte_symbol_sz(int nof_prb) { + if (nof_prb<=0) { + return -1; + } + if (nof_prb<=6) { + return 128; + } else if (nof_prb<=15) { + return 256; + } else if (nof_prb<=25) { + return 512; + } else if (nof_prb<=50) { + return 1024; + } else if (nof_prb<=75) { + return 1536; + } else if (nof_prb<=100) { + return 2048; + } + return -1; +} + +int lte_voffset(int symbol_id, int cell_id, int nof_ports) { + if (nof_ports == 1 && symbol_id==0) { + return (cell_id+3) % 6; + } else { + return cell_id % 6; + } +} + +/* Returns the number of available RE per PRB */ +int lte_re_x_prb(int ns, int symbol, int nof_ports, int nof_symbols) { + if (symbol == 0) { + if (((ns % 2) == 0) || (ns == 1)) { + return RE_X_RB - 4; + } else { + if (nof_ports == 1) { + return RE_X_RB - 2; + } else { + return RE_X_RB - 4; + } + } + } else if (symbol == 1) { + if (ns == 1) { + return RE_X_RB - 4; + } else if (nof_ports == 4) { + return RE_X_RB - 4; + } else { + return RE_X_RB; + } + } else if (symbol == nof_symbols - 3) { + if (nof_ports == 1) { + return RE_X_RB - 2; + } else { + return RE_X_RB - 4; + } + } else { + return RE_X_RB; + } +} + + +struct lte_band { + int band; + float fd_low_mhz; + int earfcn_offset; + int earfcn_max; + enum band_geographical_area area; +}; + +struct lte_band lte_bands[NOF_LTE_BANDS] = { + {1, 2110, 0, 599, ALL}, + {2, 1930, 600, 1199, NAR}, + {3, 1805, 1200, 1949, ALL}, + {4, 2110, 1950, 2399, NAR}, + {5, 869, 2400, 2649, NAR}, + {6, 875, 2650, 2749, APAC}, + {7, 2620, 2750, 3449, EMEA}, + {8, 925, 3450, 3799, ALL}, + {9, 1844.9, 3800, 4149, APAC}, + {10, 2110, 4150, 4749, NAR}, + {11, 1475.9, 4750, 4949, JAPAN}, + {12, 729, 5010, 5179, NAR}, + {13, 746, 5180, 5279, NAR}, + {14, 758, 5280, 5379, NAR}, + {17, 734, 5730, 5849, NAR}, + {18, 860, 5850, 5999, JAPAN}, + {19, 875, 6000, 6149, JAPAN}, + {20, 791, 6150, 6449, EMEA}, + {21, 1495.9, 6450, 6599, JAPAN}, + {22, 3500, 6600, 7399, NA}, + {23, 2180, 7500, 7699, NAR}, + {24, 1525, 7700, 8039, NAR}, + {25, 1930, 8040, 
8689, NAR}, + {26, 859, 8690, 9039, NAR}, + {27, 852, 9040, 9209, NAR}, + {28, 758, 9210, 9659, APAC}, + {29, 717, 9660, 9769, NAR}, + {30, 2350, 9770, 9869, NAR}, + {31, 462.5, 9870, 9919, CALA} +}; +#define EOF_BAND 9919 + +float get_fd(struct lte_band *band, int earfcn) { + return band->fd_low_mhz + 0.1*(earfcn - band->earfcn_offset); +} + +float lte_band_fd(int earfcn) { + int i; + i=0; + while(i < NOF_LTE_BANDS && lte_bands[i].earfcn_offset lte_bands[i].earfcn_max) { + fprintf(stderr, "Error: Invalid end earfcn %d. Max is %d\n", end_earfcn, lte_bands[i].earfcn_max); + return -1; + } + } + if (start_earfcn == -1) { + start_earfcn = lte_bands[i].earfcn_offset; + } else { + if (start_earfcn < lte_bands[i].earfcn_offset) { + fprintf(stderr, "Error: Invalid start earfcn %d. Min is %d\n", start_earfcn, lte_bands[i].earfcn_offset); + return -1; + } + } + nof_earfcn = end_earfcn - start_earfcn; + + if (nof_earfcn > max_elems) { + nof_earfcn = max_elems; + } + for (j=0;j 0;i++) { + if (lte_bands[i].area == region) { + n = lte_band_get_fd_band(i, &earfcn[nof_fd], -1, -1, max_elems); + if (n != -1) { + nof_fd += n; + max_elems -= n; + } else { + return -1; + } + } + } + return nof_fd; +} + diff --git a/lib/lte/src/phch_sequence.c b/lib/lte/src/phch_sequence.c new file mode 100644 index 000000000..281e8c6fa --- /dev/null +++ b/lib/lte/src/phch_sequence.c @@ -0,0 +1,24 @@ +/* + * Copyright (c) 2013, Ismael Gomez-Miguelez . + * This file is part of OSLD-lib (http://https://github.com/ismagom/osld-lib) + * + * OSLD-lib is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * OSLD-lib is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with OSLD-lib. If not, see . + */ + +#include "lte/base.h" +#include "lte/sequence.h" + +int sequence_pbch(sequence_t *seq, lte_cp_t cp, int cell_id) { + return sequence_LTEPRS(seq, CP_ISNORM(cp)?1920:1728, cell_id); +} diff --git a/lib/lte/src/sequence.c b/lib/lte/src/sequence.c new file mode 100644 index 000000000..7f85006ba --- /dev/null +++ b/lib/lte/src/sequence.c @@ -0,0 +1,87 @@ +/* + * Copyright (c) 2013, Ismael Gomez-Miguelez . + * This file is part of OSLD-lib (http://https://github.com/ismagom/osld-lib) + * + * OSLD-lib is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * OSLD-lib is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with OSLD-lib. If not, see . + */ + +#include "lte/sequence.h" + +#include +#include +#include + +#define Nc 1600 +#define GOLDMAXLEN (Nc*10) +static int x1[GOLDMAXLEN]; +static int x2[GOLDMAXLEN]; + + +/* + * Pseudo Random Sequence generation. 
+ * It follows the 3GPP Release 8 (LTE) 36.211 + * Section 7.2 + */ +void generate_prs_c(sequence_t *q, unsigned int seed) { + int n; + + assert(q->len + Nc + 31 < GOLDMAXLEN); + + for (n = 0; n < 31; n++) { + x1[n] = 0; + x2[n] = (seed >> n) & 0x1; + } + x1[0] = 1; + + for (n = 0; n < Nc + q->len; n++) { + x1[n + 31] = (x1[n + 3] + x1[n]) & 0x1; + x2[n + 31] = (x2[n + 3] + x2[n + 2] + x2[n]) & 0x1; + } + + for (n = 0; n < q->len; n++) { + q->c[n] = (x1[n + Nc] + x2[n + Nc]) & 0x1; + } + +} + +int sequence_LTEPRS(sequence_t *q, int len, int seed) { + if (sequence_init(q, len)) { + return -1; + } + q->len = len; + generate_prs_c(q, seed); + return 0; +} + +int sequence_init(sequence_t *q, int len) { + if (q->c && (q->len != len)) { + free(q->c); + } + if (!q->c) { + q->c = malloc(len * sizeof(char)); + if (!q->c) { + return -1; + } + } + return 0; +} + +void sequence_free(sequence_t *q) { + if (q->c) { + free(q->c); + } + bzero(q, sizeof(sequence_t)); +} + + diff --git a/lib/modem/src/demod_hard.c b/lib/modem/src/demod_hard.c new file mode 100644 index 000000000..0c2a7abf6 --- /dev/null +++ b/lib/modem/src/demod_hard.c @@ -0,0 +1,75 @@ +/* + * Copyright (c) 2013, Ismael Gomez-Miguelez . + * This file is part of OSLD-lib (http://https://github.com/ismagom/osld-lib) + * + * OSLD-lib is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * OSLD-lib is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with OSLD-lib. If not, see . + */ + +#include +#include + +#include "modem/demod_hard.h" +#include "hard_demod_lte.h" + + +void demod_hard_init(demod_hard_t* q) { + bzero((void*) q, sizeof(demod_hard_t)); +} + +void demod_hard_table(demod_hard_t* q, enum modem_std table) { + q->table = table; +} + +int demod_hard_demodulate(demod_hard_t* q, const cf* symbols, char *bits, int nsymbols) { + + int nbits=-1; + switch(q->table) { + case LTE_BPSK: + hard_bpsk_demod(symbols,bits,nsymbols); + nbits=nsymbols; + break; + case LTE_QPSK: + hard_qpsk_demod(symbols,bits,nsymbols); + nbits=nsymbols*2; + break; + case LTE_QAM16: + hard_qam16_demod(symbols,bits,nsymbols); + nbits=nsymbols*4; + break; + case LTE_QAM64: + hard_qam64_demod(symbols,bits,nsymbols); + nbits=nsymbols*6; + break; + } + return nbits; +} + + +int demod_hard_initialize(demod_hard_hl* hl) { + demod_hard_init(&hl->obj); + demod_hard_table(&hl->obj,hl->init.std); + + return 0; +} + +int demod_hard_work(demod_hard_hl* hl) { + int ret = demod_hard_demodulate(&hl->obj,hl->input,hl->output,hl->in_len); + if (hl->out_len) { + *hl->out_len = ret; + } + return 0; +} + + + diff --git a/lib/modem/src/demod_soft.c b/lib/modem/src/demod_soft.c new file mode 100644 index 000000000..8f158e0f0 --- /dev/null +++ b/lib/modem/src/demod_soft.c @@ -0,0 +1,84 @@ +/* + * Copyright (c) 2013, Ismael Gomez-Miguelez . 
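A short sketch of the low-level hard-demodulation path above; the symbol values are illustrative and assume the headers demod_hard.c already includes:

/* Hard-demodulate four QPSK symbols into eight bits (sketch). */
demod_hard_t demod;
cf symbols[4] = {  0.7 + 0.7*_Complex_I, -0.7 + 0.7*_Complex_I,
                   0.7 - 0.7*_Complex_I, -0.7 - 0.7*_Complex_I };
char bits[8];

demod_hard_init(&demod);
demod_hard_table(&demod, LTE_QPSK);
int nbits = demod_hard_demodulate(&demod, symbols, bits, 4);
/* nbits == 8 and bits == {0,0, 1,0, 0,1, 1,1} for the symbols above */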
+ * This file is part of OSLD-lib (http://https://github.com/ismagom/osld-lib) + * + * OSLD-lib is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * OSLD-lib is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with OSLD-lib. If not, see . + */ + +#include +#include + +#include "utils/bit.h" +#include "modem/demod_soft.h" +#include "soft_algs.h" + + +void demod_soft_init(demod_soft_t *q) { + bzero((void*)q,sizeof(demod_soft_t)); +} + +void demod_soft_table_set(demod_soft_t *q, modem_table_t *table) { + q->table = table; +} + +void demod_soft_alg_set(demod_soft_t *q, enum alg alg_type) { + q->alg_type = alg_type; +} + +void demod_soft_sigma_set(demod_soft_t *q, float sigma) { + q->sigma = sigma; +} + +int demod_soft_demodulate(demod_soft_t *q, const cf* symbols, float* llr, int nsymbols) { + switch(q->alg_type) { + case EXACT: + llr_exact(symbols, llr, nsymbols, q->table->nsymbols, q->table->nbits_x_symbol, + q->table->symbol_table, q->table->soft_table.idx, q->sigma); + break; + case APPROX: + llr_approx(symbols, llr, nsymbols, q->table->nsymbols, q->table->nbits_x_symbol, + q->table->symbol_table, q->table->soft_table.idx, q->sigma); + break; + } + return nsymbols*q->table->nbits_x_symbol; +} + + + +/* High-Level API */ +int demod_soft_initialize(demod_soft_hl* hl) { + modem_table_init(&hl->table); + if (modem_table_std(&hl->table,hl->init.std,true)) { + return -1; + } + demod_soft_init(&hl->obj); + hl->obj.table = &hl->table; + + return 0; +} + +int demod_soft_work(demod_soft_hl* hl) { + hl->obj.sigma = hl->ctrl_in.sigma; + hl->obj.alg_type = hl->ctrl_in.alg_type; + int ret = demod_soft_demodulate(&hl->obj,hl->input,hl->output,hl->in_len); + if (hl->out_len) { + *hl->out_len = ret; + } + return 0; +} + +int demod_soft_stop(demod_soft_hl* hl) { + modem_table_free(&hl->table); + return 0; +} diff --git a/lib/modem/src/hard_demod_lte.c b/lib/modem/src/hard_demod_lte.c new file mode 100644 index 000000000..282ccf36f --- /dev/null +++ b/lib/modem/src/hard_demod_lte.c @@ -0,0 +1,195 @@ +/* + * Copyright (c) 2013, Ismael Gomez-Miguelez , Vuk Marojevic . + * This file is part of OSLD-lib (http://https://github.com/ismagom/osld-lib) + * + * OSLD-lib is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * OSLD-lib is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with OSLD-lib. If not, see . 
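The soft demodulator above needs a populated modem_table_t plus the noise parameter sigma; APPROX selects the max-log style metric that only considers the nearest '0' and '1' symbols per bit. A minimal low-level sketch; the buffer length and sigma are example values of ours:

/* Soft-demodulate received QPSK symbols into LLRs with the approximate algorithm (sketch). */
#define NOF_SYMBOLS 100

modem_table_t table;
demod_soft_t demod;
cf symbols[NOF_SYMBOLS];                   /* received samples, filled elsewhere */
float llr[2 * NOF_SYMBOLS];                /* QPSK carries 2 bits per symbol     */

modem_table_init(&table);
if (modem_table_std(&table, LTE_QPSK, true)) {   /* true: also build the soft-demod index tables */
    /* handle error */
}
demod_soft_init(&demod);
demod_soft_table_set(&demod, &table);
demod_soft_alg_set(&demod, APPROX);
demod_soft_sigma_set(&demod, 0.1f);              /* noise parameter, example value */
int nllr = demod_soft_demodulate(&demod, symbols, llr, NOF_SYMBOLS);   /* nllr == 200 */
modem_table_free(&table);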
+ */ + +#include +#include +#include + +#include "modem/demod_hard.h" +#include "hard_demod_lte.h" + +/** + * @ingroup Hard BPSK demodulator + * + *LTE-BPSK constellation: + * Q + * | 0 + *---------> I + *1 | + * \param in input symbols (_Complex float) + * \param out output symbols (chars) + * \param N Number of input symbols + * \param modulation Modulation type + */ +inline void hard_bpsk_demod(const cf* in, char* out, int N) +{ + int s; + + for (s=0; s 0) { + if ((__imag__ in[s] > 0) || (__real__ in[s] > -__imag__ in[s])) { + out[s] = 0x0; + } else { + out[s] = 0x1; + } + } else { + if ((__imag__ in[s] < 0) || (__imag__ in[s] < -__real__ in[s])) { + out[s] = 0x1; + } else { + out[s] = 0x0; + } + } + } +} + +/** + * @ingroup Hard QPSK demodulator + * + * LTE-QPSK constellation: + * Q + *10 | 00 + *-----------> I + *11 | 01 + * + * \param in input symbols (_Complex float) + * \param out output symbols (chars) + * \param N Number of input symbols + * \param modulation Modulation type + */ +inline void hard_qpsk_demod(const cf* in, char* out, int N) +{ + int s; + + for (s=0; s 0) { + out[2*s] = 0x0; + } else { + out[2*s] = 0x1; + } + if (__imag__ in[s] > 0) { + out[2*s+1] = 0x0; + } else { + out[2*s+1] = 0x1; + } + } +} + +/** + * @ingroup Hard 16QAM demodulator + * + * LTE-16QAM constellation: + * Q + * 1011 1001 | 0001 0011 + * 1010 1000 | 0000 0010 + *---------------------------------> I + * 1110 1100 | 0100 0110 + * 1111 1101 | 0101 0111 + * + * \param in input symbols (_Complex float) + * \param out output symbols (chars) + * \param N Number of input symbols + * \param modulation Modulation type + */ +inline void hard_qam16_demod(const cf* in, char* out, int N) +{ + int s; + + for (s=0; s 0) { + out[4*s] = 0x0; + } else { + out[4*s] = 0x1; + } + + if ((__real__ in[s] > QAM16_THRESHOLD) || (__real__ in[s] < -QAM16_THRESHOLD)) { + out[4*s+2] = 0x1; + } else { + out[4*s+2] = 0x0; + } + + if (__imag__ in[s] > 0) { + out[4*s+1] = 0x0; + } else { + out[4*s+1] = 0x1; + } + + if ((__imag__ in[s] > QAM16_THRESHOLD) || (__imag__ in[s] < -QAM16_THRESHOLD)) { + out[4*s+3] = 0x1; + } else { + out[4*s+3] = 0x0; + } + } +} + +/** + * @ingroup Hard 64QAM demodulator + * + * LTE-64QAM constellation: + * see [3GPP TS 36.211 version 10.5.0 Release 10, Section 7.1.4] + * + * \param in input symbols (_Complex float) + * \param out output symbols (chars) + * \param N Number of input symbols + * \param modulation Modulation type + */ +inline void hard_qam64_demod(const cf* in, char* out, int N) +{ + int s; + + for (s=0; s 0){ + out[6*s] = 0x0; + } else { + out[6*s] = 0x1; + } + if ((__real__ in[s] > QAM64_THRESHOLD_3) || (__real__ in[s] < -QAM64_THRESHOLD_3)) { + out[6*s+2] = 0x1; + out[6*s+4] = 0x1; + } else if ((__real__ in[s] > QAM64_THRESHOLD_2) || (__real__ in[s] < -QAM64_THRESHOLD_2)) { + out[6*s+2] = 0x1; + out[6*s+4] = 0x0; + } else if ((__real__ in[s] > QAM64_THRESHOLD_1) || (__real__ in[s] < -QAM64_THRESHOLD_1)) { + out[6*s+2] = 0x0; + out[6*s+4] = 0x0; + } else { + out[6*s+2] = 0x0; + out[6*s+4] = 0x1; + } + + /* bits associated with/obtained from quadrature component: b1, b3, b5 */ + if (__imag__ in[s] > 0){ + out[6*s+1] = 0x0; + } else { + out[6*s+1] = 0x1; + } + if ((__imag__ in[s] > QAM64_THRESHOLD_3) || (__imag__ in[s] < -QAM64_THRESHOLD_3)) { + out[6*s+3] = 0x1; + out[6*s+5] = 0x1; + } else if ((__imag__ in[s] > QAM64_THRESHOLD_2) || (__imag__ in[s] < -QAM64_THRESHOLD_2)) { + out[6*s+3] = 0x1; + out[6*s+5] = 0x0; + } else if ((__imag__ in[s] > QAM64_THRESHOLD_1) || (__imag__ in[s] < 
-QAM64_THRESHOLD_1)) { + out[6*s+3] = 0x0; + out[6*s+5] = 0x0; + } else { + out[6*s+3] = 0x0; + out[6*s+5] = 0x1; + } + } +} diff --git a/lib/modem/src/hard_demod_lte.h b/lib/modem/src/hard_demod_lte.h new file mode 100644 index 000000000..5d73eb625 --- /dev/null +++ b/lib/modem/src/hard_demod_lte.h @@ -0,0 +1,31 @@ +/* + * Copyright (c) 2013, Ismael Gomez-Miguelez , Vuk Marojevic . + * This file is part of OSLD-lib (http://https://github.com/ismagom/osld-lib) + * + * OSLD-lib is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * OSLD-lib is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with OSLD-lib. If not, see . + */ + +/* Thresholds for Demodulation */ +/* Assume perfect amplitude and phase alignment. + * Check threshold values for real case + * or implement dynamic threshold adjustent as a function of received symbol amplitudes */ +#define QAM16_THRESHOLD 2/sqrt(10) +#define QAM64_THRESHOLD_1 2/sqrt(42) +#define QAM64_THRESHOLD_2 4/sqrt(42) +#define QAM64_THRESHOLD_3 6/sqrt(42) + +void hard_bpsk_demod(const cf* in, char* out, int N); +void hard_qpsk_demod(const cf* in, char* out, int N); +void hard_qam16_demod(const cf* in, char* out, int N); +void hard_qam64_demod(const cf* in, char* out, int N); diff --git a/lib/modem/src/lte_tables.c b/lib/modem/src/lte_tables.c new file mode 100644 index 000000000..a970e87d5 --- /dev/null +++ b/lib/modem/src/lte_tables.c @@ -0,0 +1,270 @@ +/* + * Copyright (c) 2013, Ismael Gomez-Miguelez , Vuk Marojevic . + * This file is part of OSLD-lib (http://https://github.com/ismagom/osld-lib) + * + * OSLD-lib is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * OSLD-lib is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with OSLD-lib. If not, see . 
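For reference, the arithmetic behind these thresholds: the 16QAM constellation uses per-axis amplitudes of 1/sqrt(10) and 3/sqrt(10), so the ideal decision boundary between them is the midpoint 2/sqrt(10) ~= 0.632, which is exactly QAM16_THRESHOLD. Likewise 64QAM uses 1, 3, 5 and 7 over sqrt(42), giving boundaries 2/sqrt(42) ~= 0.309, 4/sqrt(42) ~= 0.617 and 6/sqrt(42) ~= 0.926, i.e. QAM64_THRESHOLD_1/2/3.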
+ */ + +#include +#include +#include +#include + +#include "modem/modem_table.h" +#include "lte_tables.h" + +/** + * Set the BPSK modulation table */ +void set_BPSKtable(cf* table, soft_table_t *soft_table, bool compute_soft_demod) +{ + // LTE-BPSK constellation: + // Q + // | 0 + //---------> I + // 1 | + table[0] = BPSK_LEVEL + BPSK_LEVEL*_Complex_I; + table[1] = -BPSK_LEVEL -BPSK_LEVEL*_Complex_I; + + if (!compute_soft_demod) { + return; + } + + /* BSPK symbols containing a '0' and a '1' (only two symbols, 1 bit) */ + soft_table->idx[0][0][0] = 0; + soft_table->idx[1][0][0] = 1; +} + +/** + * Set the QPSK modulation table */ +void set_QPSKtable(cf* table, soft_table_t *soft_table, bool compute_soft_demod) +{ + int i,j; + + // LTE-QPSK constellation: + // Q + // 10 | 00 + //-----------> I + // 11 | 01 + table[0] = QPSK_LEVEL + QPSK_LEVEL*_Complex_I; + table[1] = QPSK_LEVEL - QPSK_LEVEL*_Complex_I; + table[2] = -QPSK_LEVEL + QPSK_LEVEL*_Complex_I; + table[3] = -QPSK_LEVEL - QPSK_LEVEL*_Complex_I; + for (i=0;i<6;i++) { + for (j=0;j<32;j++) { + soft_table->idx[0][i][j] = 0; + soft_table->idx[1][i][j] = 0; + } + } + + if (!compute_soft_demod) { + return; + } + + /* QSPK symbols containing a '0' at the different bit positions */ + soft_table->idx[0][0][0] = 0; + soft_table->idx[0][0][1] = 1; + soft_table->idx[0][1][0] = 0; + soft_table->idx[0][1][1] = 2; + /* QSPK symbols containing a '1' at the different bit positions */ + soft_table->idx[1][0][0] = 2; + soft_table->idx[1][0][1] = 3; + soft_table->idx[1][1][0] = 1; + soft_table->idx[1][1][1] = 3; +} + +/** + * Set the 16QAM modulation table */ +void set_16QAMtable(cf* table, soft_table_t *soft_table, bool compute_soft_demod) +{ + int i,j; + // LTE-16QAM constellation: + // Q + // 1011 1001 | 0001 0011 + // 1010 1000 | 0000 0010 + //---------------------------------> I + // 1110 1100 | 0100 0110 + // 1111 1101 | 0101 0111 + table[0] = QAM16_LEVEL_1 + QAM16_LEVEL_1*_Complex_I; + table[1] = QAM16_LEVEL_1 + QAM16_LEVEL_2*_Complex_I; + table[2] = QAM16_LEVEL_2 + QAM16_LEVEL_1*_Complex_I; + table[3] = QAM16_LEVEL_2 + QAM16_LEVEL_2*_Complex_I; + table[4] = QAM16_LEVEL_1 - QAM16_LEVEL_1*_Complex_I; + table[5] = QAM16_LEVEL_1 - QAM16_LEVEL_2*_Complex_I; + table[6] = QAM16_LEVEL_2 - QAM16_LEVEL_1*_Complex_I; + table[7] = QAM16_LEVEL_2 - QAM16_LEVEL_2*_Complex_I; + table[8] = -QAM16_LEVEL_1 + QAM16_LEVEL_1*_Complex_I; + table[9] = -QAM16_LEVEL_1 + QAM16_LEVEL_2*_Complex_I; + table[10] = -QAM16_LEVEL_2 + QAM16_LEVEL_1*_Complex_I; + table[11] = -QAM16_LEVEL_2 + QAM16_LEVEL_2*_Complex_I; + table[12] = -QAM16_LEVEL_1 - QAM16_LEVEL_1*_Complex_I; + table[13] = -QAM16_LEVEL_1 - QAM16_LEVEL_2*_Complex_I; + table[14] = -QAM16_LEVEL_2 - QAM16_LEVEL_1*_Complex_I; + table[15] = -QAM16_LEVEL_2 - QAM16_LEVEL_2*_Complex_I; + for (i=0;i<6;i++) { + for (j=0;j<32;j++) { + soft_table->idx[0][i][j] = 0; + soft_table->idx[1][i][j] = 0; + } + } + if (!compute_soft_demod) { + return; + } + + + /* Matrices identifying the zeros and ones of LTE-16QAM constellation */ + for (i=0;i<8;i++) { + soft_table->idx[0][0][i] = i; /* symbols with a '0' at the bit0 (leftmost)*/ + soft_table->idx[1][0][i] = i+8; /* symbols with a '1' at the bit0 (leftmost)*/ + } + /* symbols with a '0' ans '1' at the bit position 1: */ + for (i=0;i<4;i++) { + soft_table->idx[0][1][i] = i; + soft_table->idx[0][1][i+4] = i+8; + soft_table->idx[1][1][i] = i+4; + soft_table->idx[1][1][i+4] = i+12; + } + /* symbols with a '0' ans '1' at the bit position 2: */ + for (j=0;j<4;j++) { + for (i=0;i<2;i++) { + 
soft_table->idx[0][2][i+2*j] = i + 4*j; + soft_table->idx[1][2][i+2*j] = i+2 + 4*j; + } + } + /* symbols with a '0' ans '1' at the bit position 3: */ + for (i=0;i<8;i++) { + soft_table->idx[0][3][i] = 2*i; + soft_table->idx[1][3][i] = 2*i+1; + } +} + +/** + * Set the 64QAM modulation table */ +void set_64QAMtable(cf* table, soft_table_t *soft_table, bool compute_soft_demod) +{ + int i,j; + // LTE-64QAM constellation: + // see [3GPP TS 36.211 version 10.5.0 Release 10, Section 7.1.4] + table[0] = QAM64_LEVEL_2 + QAM64_LEVEL_2*_Complex_I; + table[1] = QAM64_LEVEL_2 + QAM64_LEVEL_1*_Complex_I; + table[2] = QAM64_LEVEL_1 + QAM64_LEVEL_2*_Complex_I; + table[3] = QAM64_LEVEL_1 + QAM64_LEVEL_1*_Complex_I; + table[4] = QAM64_LEVEL_2 + QAM64_LEVEL_3*_Complex_I; + table[5] = QAM64_LEVEL_2 + QAM64_LEVEL_4*_Complex_I; + table[6] = QAM64_LEVEL_1 + QAM64_LEVEL_3*_Complex_I; + table[7] = QAM64_LEVEL_1 + QAM64_LEVEL_4*_Complex_I; + table[8] = QAM64_LEVEL_3 + QAM64_LEVEL_2*_Complex_I; + table[9] = QAM64_LEVEL_3 + QAM64_LEVEL_1*_Complex_I; + table[10] = QAM64_LEVEL_4 + QAM64_LEVEL_2*_Complex_I; + table[11] = QAM64_LEVEL_4 + QAM64_LEVEL_1*_Complex_I; + table[12] = QAM64_LEVEL_3 + QAM64_LEVEL_3*_Complex_I; + table[13] = QAM64_LEVEL_3 + QAM64_LEVEL_4*_Complex_I; + table[14] = QAM64_LEVEL_4 + QAM64_LEVEL_3*_Complex_I; + table[15] = QAM64_LEVEL_4 + QAM64_LEVEL_4*_Complex_I; + table[16] = QAM64_LEVEL_2 - QAM64_LEVEL_2*_Complex_I; + table[17] = QAM64_LEVEL_2 - QAM64_LEVEL_1*_Complex_I; + table[18] = QAM64_LEVEL_1 - QAM64_LEVEL_2*_Complex_I; + table[19] = QAM64_LEVEL_1 - QAM64_LEVEL_1*_Complex_I; + table[20] = QAM64_LEVEL_2 - QAM64_LEVEL_3*_Complex_I; + table[21] = QAM64_LEVEL_2 - QAM64_LEVEL_4*_Complex_I; + table[22] = QAM64_LEVEL_1 - QAM64_LEVEL_3*_Complex_I; + table[23] = QAM64_LEVEL_1 - QAM64_LEVEL_4*_Complex_I; + table[24] = QAM64_LEVEL_3 - QAM64_LEVEL_2*_Complex_I; + table[25] = QAM64_LEVEL_3 - QAM64_LEVEL_1*_Complex_I; + table[26] = QAM64_LEVEL_4 - QAM64_LEVEL_2*_Complex_I; + table[27] = QAM64_LEVEL_4 - QAM64_LEVEL_1*_Complex_I; + table[28] = QAM64_LEVEL_3 - QAM64_LEVEL_3*_Complex_I; + table[29] = QAM64_LEVEL_3 - QAM64_LEVEL_4*_Complex_I; + table[30] = QAM64_LEVEL_4 - QAM64_LEVEL_3*_Complex_I; + table[31] = QAM64_LEVEL_4 - QAM64_LEVEL_4*_Complex_I; + table[32] = -QAM64_LEVEL_2 + QAM64_LEVEL_2*_Complex_I; + table[33] = -QAM64_LEVEL_2 + QAM64_LEVEL_1*_Complex_I; + table[34] = -QAM64_LEVEL_1 + QAM64_LEVEL_2*_Complex_I; + table[35] = -QAM64_LEVEL_1 + QAM64_LEVEL_1*_Complex_I; + table[36] = -QAM64_LEVEL_2 + QAM64_LEVEL_3*_Complex_I; + table[37] = -QAM64_LEVEL_2 + QAM64_LEVEL_4*_Complex_I; + table[38] = -QAM64_LEVEL_1 + QAM64_LEVEL_3*_Complex_I; + table[39] = -QAM64_LEVEL_1 + QAM64_LEVEL_4*_Complex_I; + table[40] = -QAM64_LEVEL_3 + QAM64_LEVEL_2*_Complex_I; + table[41] = -QAM64_LEVEL_3 + QAM64_LEVEL_1*_Complex_I; + table[42] = -QAM64_LEVEL_4 + QAM64_LEVEL_2*_Complex_I; + table[43] = -QAM64_LEVEL_4 + QAM64_LEVEL_1*_Complex_I; + table[44] = -QAM64_LEVEL_3 + QAM64_LEVEL_3*_Complex_I; + table[45] = -QAM64_LEVEL_3 + QAM64_LEVEL_4*_Complex_I; + table[46] = -QAM64_LEVEL_4 + QAM64_LEVEL_3*_Complex_I; + table[47] = -QAM64_LEVEL_4 + QAM64_LEVEL_4*_Complex_I; + table[48] = -QAM64_LEVEL_2 - QAM64_LEVEL_2*_Complex_I; + table[49] = -QAM64_LEVEL_2 - QAM64_LEVEL_1*_Complex_I; + table[50] = -QAM64_LEVEL_1 - QAM64_LEVEL_2*_Complex_I; + table[51] = -QAM64_LEVEL_1 - QAM64_LEVEL_1*_Complex_I; + table[52] = -QAM64_LEVEL_2 - QAM64_LEVEL_3*_Complex_I; + table[53] = -QAM64_LEVEL_2 - QAM64_LEVEL_4*_Complex_I; + table[54] = -QAM64_LEVEL_1 - 
QAM64_LEVEL_3*_Complex_I; + table[55] = -QAM64_LEVEL_1 - QAM64_LEVEL_4*_Complex_I; + table[56] = -QAM64_LEVEL_3 - QAM64_LEVEL_2*_Complex_I; + table[57] = -QAM64_LEVEL_3 - QAM64_LEVEL_1*_Complex_I; + table[58] = -QAM64_LEVEL_4 - QAM64_LEVEL_2*_Complex_I; + table[59] = -QAM64_LEVEL_4 - QAM64_LEVEL_1*_Complex_I; + table[60] = -QAM64_LEVEL_3 - QAM64_LEVEL_3*_Complex_I; + table[61] = -QAM64_LEVEL_3 - QAM64_LEVEL_4*_Complex_I; + table[62] = -QAM64_LEVEL_4 - QAM64_LEVEL_3*_Complex_I; + table[63] = -QAM64_LEVEL_4 - QAM64_LEVEL_4*_Complex_I; + + if (!compute_soft_demod) { + return; + } + + /* Matrices identifying the zeros and ones of LTE-64QAM constellation */ + + for (i=0;i<32;i++) { + soft_table->idx[0][0][i] = i; /* symbols with a '0' at the bit0 (leftmost)*/ + soft_table->idx[1][0][i] = i+32; /* symbols with a '1' at the bit0 (leftmost)*/ + } + /* symbols with a '0' ans '1' at the bit position 1: */ + for (i=0;i<16;i++) { + soft_table->idx[0][1][i] = i; + soft_table->idx[0][1][i+16] = i+32; + soft_table->idx[1][1][i] = i+16; + soft_table->idx[1][1][i+16] = i+48; + } + /* symbols with a '0' ans '1' at the bit position 2: */ + for (i=0;i<8;i++) { + soft_table->idx[0][2][i] = i; + soft_table->idx[0][2][i+8] = i+16; + soft_table->idx[0][2][i+16] = i+32; + soft_table->idx[0][2][i+24] = i+48; + soft_table->idx[1][2][i] = i+8; + soft_table->idx[1][2][i+8] = i+24; + soft_table->idx[1][2][i+16] = i+40; + soft_table->idx[1][2][i+24] = i+56; + } + /* symbols with a '0' ans '1' at the bit position 3: */ + for (j=0;j<8;j++) { + for (i=0;i<4;i++) { + soft_table->idx[0][3][i+4*j] = i + 8*j; + soft_table->idx[1][3][i+4*j] = i+4 + 8*j; + } + } + /* symbols with a '0' ans '1' at the bit position 4: */ + for (j=0;j<16;j++) { + for (i=0;i<2;i++) { + soft_table->idx[0][4][i+2*j] = i + 4*j; + soft_table->idx[1][4][i+2*j] = i+2 + 4*j; + } + } + /* symbols with a '0' ans '1' at the bit position 5: */ + for (i=0;i<32;i++) { + soft_table->idx[0][5][i] = 2*i; + soft_table->idx[1][5][i] = 2*i+1; + } +} diff --git a/lib/modem/src/lte_tables.h b/lib/modem/src/lte_tables.h new file mode 100644 index 000000000..49f259547 --- /dev/null +++ b/lib/modem/src/lte_tables.h @@ -0,0 +1,42 @@ +/* + * Copyright (c) 2013, Ismael Gomez-Miguelez , Vuk Marojevic . + * This file is part of OSLD-lib (http://https://github.com/ismagom/osld-lib) + * + * OSLD-lib is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * OSLD-lib is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with OSLD-lib. If not, see . 
+ */ + + +#define BPSK_LEVEL 1/sqrt(2) + +#define QPSK_LEVEL 1/sqrt(2) + +#define QAM16_LEVEL_1 1/sqrt(10) +#define QAM16_LEVEL_2 3/sqrt(10) + +#define QAM64_LEVEL_1 1/sqrt(42) +#define QAM64_LEVEL_2 3/sqrt(42) +#define QAM64_LEVEL_3 5/sqrt(42) +#define QAM64_LEVEL_4 7/sqrt(42) + +#define QAM64_LEVEL_x 2/sqrt(42) +/* this is not an QAM64 level, but, rather, an auxiliary value that can be used for computing the + * symbol from the bit sequence */ + + + + +void set_BPSKtable(cf* table, soft_table_t *soft_table, bool compute_soft_demod); +void set_QPSKtable(cf* table, soft_table_t *soft_table, bool compute_soft_demod); +void set_16QAMtable(cf* table, soft_table_t *soft_table, bool compute_soft_demod); +void set_64QAMtable(cf* table, soft_table_t *soft_table, bool compute_soft_demod); diff --git a/lib/modem/src/mod.c b/lib/modem/src/mod.c new file mode 100644 index 000000000..b78c36b68 --- /dev/null +++ b/lib/modem/src/mod.c @@ -0,0 +1,63 @@ +/* + * Copyright (c) 2013, Ismael Gomez-Miguelez . + * This file is part of OSLD-lib (http://https://github.com/ismagom/osld-lib) + * + * OSLD-lib is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * OSLD-lib is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with OSLD-lib. If not, see . + */ + +#include +#include + +#include "utils/bit.h" +#include "modem/mod.h" + +/** Low-level API */ + +int mod_modulate(modem_table_t* q, const char *bits, cf* symbols, int nbits) { + int i,j,idx; + char *b_ptr=(char*) bits; + j=0; + for (i=0;inbits_x_symbol) { + idx = bit_unpack(&b_ptr,q->nbits_x_symbol); + symbols[j] = q->symbol_table[idx]; + j++; + } + return j; +} + + +/* High-Level API */ +int mod_initialize(mod_hl* hl) { + modem_table_init(&hl->obj); + if (modem_table_std(&hl->obj,hl->init.std,false)) { + return -1; + } + + return 0; +} + +int mod_work(mod_hl* hl) { + int ret = mod_modulate(&hl->obj,hl->input,hl->output,hl->in_len); + if (hl->out_len) { + *hl->out_len = ret; + } + return 0; +} + +int mod_stop(mod_hl* hl) { + modem_table_free(&hl->obj); + return 0; +} + + diff --git a/lib/modem/src/modem_table.c b/lib/modem/src/modem_table.c new file mode 100644 index 000000000..1877cb046 --- /dev/null +++ b/lib/modem/src/modem_table.c @@ -0,0 +1,99 @@ +/* + * Copyright (c) 2013, Ismael Gomez-Miguelez . + * This file is part of OSLD-lib (http://https://github.com/ismagom/osld-lib) + * + * OSLD-lib is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * OSLD-lib is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with OSLD-lib. If not, see . 
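+ *
+ * Minimal usage sketch of the table API defined in this file (illustrative
+ * only, error handling omitted; types and the LTE_QPSK constant come from
+ * modem/modem_table.h):
+ *
+ *   modem_table_t table;
+ *   modem_table_init(&table);
+ *   if (modem_table_std(&table, LTE_QPSK, false)) {
+ *     // error
+ *   }
+ *   // ... map bits using table.symbol_table / table.nbits_x_symbol ...
+ *   modem_table_free(&table);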
+ */ + +#include +#include +#include +#include +#include +#include + +#include "modem/modem_table.h" +#include "lte_tables.h" + +/** Internal functions */ +static int table_create(modem_table_t* q) { + q->symbol_table = malloc(q->nsymbols*sizeof(cf)); + return q->symbol_table==NULL; +} + +void modem_table_init(modem_table_t* q) { + bzero((void*)q,sizeof(modem_table_t)); +} +void modem_table_free(modem_table_t* q) { + if (q->symbol_table) { + free(q->symbol_table); + } + bzero(q, sizeof(modem_table_t)); +} +void modem_table_reset(modem_table_t* q) { + modem_table_free(q); + modem_table_init(q); +} + +int modem_table_set(modem_table_t* q, cf* table, soft_table_t *soft_table, int nsymbols, int nbits_x_symbol) { + if (q->nsymbols) { + return -1; + } + q->nsymbols = nsymbols; + if (table_create(q)) { + return -1; + } + memcpy(q->symbol_table,table,q->nsymbols*sizeof(cf)); + memcpy(&q->soft_table,soft_table,sizeof(soft_table_t)); + q->nbits_x_symbol = nbits_x_symbol; + return 0; +} + +int modem_table_std(modem_table_t* q, enum modem_std std, bool compute_soft_demod) { + switch(std) { + case LTE_BPSK: + q->nbits_x_symbol = 1; + q->nsymbols = 2; + if (table_create(q)) { + return -1; + } + set_BPSKtable(q->symbol_table, &q->soft_table, compute_soft_demod); + break; + case LTE_QPSK: + q->nbits_x_symbol = 2; + q->nsymbols = 4; + if (table_create(q)) { + return -1; + } + set_QPSKtable(q->symbol_table, &q->soft_table, compute_soft_demod); + break; + case LTE_QAM16: + q->nbits_x_symbol = 4; + q->nsymbols = 16; + if (table_create(q)) { + return -1; + } + set_16QAMtable(q->symbol_table, &q->soft_table, compute_soft_demod); + break; + case LTE_QAM64: + q->nbits_x_symbol = 6; + q->nsymbols = 64; + if (table_create(q)) { + return -1; + } + set_64QAMtable(q->symbol_table, &q->soft_table, compute_soft_demod); + break; + } + return 0; +} diff --git a/lib/modem/src/soft_algs.c b/lib/modem/src/soft_algs.c new file mode 100644 index 000000000..deca250eb --- /dev/null +++ b/lib/modem/src/soft_algs.c @@ -0,0 +1,133 @@ +/* + * Copyright (c) 2013, Vuk Marojevic . + * This file is part of OSLD-lib (http://https://github.com/ismagom/osld-lib) + * + * OSLD-lib is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * OSLD-lib is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with OSLD-lib. If not, see . + */ + + +#include +#include +#include +#include + +#include "soft_algs.h" + +/** + * @ingroup Soft Modulation Demapping based on the approximate + * log-likelihood algorithm + * Common algorithm that approximates the log-likelihood ratio. It takes + * only the two closest constellation symbols into account, one with a '0' + * and the other with a '1' at the given bit position. 
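+ *
+ * In formula form, the approximation described above is the usual max-log
+ * LLR: for bit position b of a received symbol y,
+ *
+ *   LLR(b) ~ ( min_{s in S0(b)} |y - s|^2 - min_{s in S1(b)} |y - s|^2 ) / sigma2
+ *
+ * where S0(b) and S1(b) are the constellation points carrying a '0' or a '1'
+ * at bit position b (as listed in the S index matrices). The output sign
+ * convention depends on the change_sign setting of the implementation.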
+ * + * \param in input symbols (_Complex float) + * \param out output symbols (float) + * \param N Number of input symbols + * \param M Number of constellation points + * \param B Number of bits per symbol + * \param symbols constellation symbols + * \param S Soft demapping auxiliary matrix + * \param sigma2 Noise vatiance + */ +void llr_approx(const _Complex float *in, float *out, int N, int M, int B, + _Complex float *symbols, int (*S)[6][32], float sigma2) { + int i, s, b; + float num, den; + float new_num, new_den; + float idiff0, qdiff0, idiff1, qdiff1; + int change_sign = -1; + + for (s=0; s. + * This file is part of OSLD-lib (http://https://github.com/ismagom/osld-lib) + * + * OSLD-lib is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * OSLD-lib is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with OSLD-lib. If not, see . + */ + +void llr_approx(const _Complex float *in, float *out, int N, int M, int B, + _Complex float *symbols, int (*S)[6][32], float sigma2); + +void llr_exact(const _Complex float *in, float *out, int N, int M, int B, + _Complex float *symbols, int (*S)[6][32], float sigma2); diff --git a/lib/phch/src/common.c b/lib/phch/src/common.c new file mode 100644 index 000000000..0791b46f5 --- /dev/null +++ b/lib/phch/src/common.c @@ -0,0 +1,70 @@ +/* + * Copyright (c) 2013, Ismael Gomez-Miguelez . + * This file is part of OSLD-lib (http://https://github.com/ismagom/osld-lib) + * + * OSLD-lib is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * OSLD-lib is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with OSLD-lib. If not, see . 
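+ *
+ * Note on phch_cp_prb_ref() below: with nof_refs reference signals per
+ * resource block of RE_X_RB elements, data REs occur in runs of
+ * (RE_X_RB / nof_refs - 1) samples between reference positions. The function
+ * copies those runs and steps one of the two pointers (selected by the last
+ * argument) over each reference RE, so reference symbols are neither read as
+ * data nor overwritten.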
+ */ + +#include +#include + +#include "common.h" +#include "lte/base.h" + +void phch_cp_prb_ref(cf_t **input, cf_t **output, int offset, int nof_refs, + int nof_prb, bool advance_output) { + int i; + + int ref_interval = ((RE_X_RB / nof_refs) - 1); + memcpy(*output, *input, offset * sizeof(cf_t)); + *input += offset; + *output += offset; + for (i = 0; i < nof_refs * nof_prb - 1; i++) { + if (advance_output) { + (*output)++; + } else { + (*input)++; + } + memcpy(*output, *input, ref_interval * sizeof(cf_t)); + *output += ref_interval; + *input += ref_interval; + } + if (ref_interval - offset > 0) { + if (advance_output) { + (*output)++; + } else { + (*input)++; + } + memcpy(*output, *input, (ref_interval - offset) * sizeof(cf_t)); + *output += (ref_interval - offset); + *input += (ref_interval - offset); + } +} + +void phch_cp_prb(cf_t **input, cf_t **output, int nof_prb) { + memcpy(*output, *input, sizeof(cf_t) * RE_X_RB * nof_prb); + *input += nof_prb * RE_X_RB; + *output += nof_prb * RE_X_RB; +} + +void phch_put_prb_ref_(cf_t **input, cf_t **output, int offset, int nof_refs, + int nof_prb) { + phch_cp_prb_ref(input, output, offset, nof_refs, nof_prb, false); +} + +void phch_get_prb_ref(cf_t **input, cf_t **output, int offset, int nof_refs, + int nof_prb) { + phch_cp_prb_ref(input, output, offset, nof_refs, nof_prb, true); +} + diff --git a/lib/phch/src/common.h b/lib/phch/src/common.h new file mode 100644 index 000000000..56774ecbb --- /dev/null +++ b/lib/phch/src/common.h @@ -0,0 +1,27 @@ +/* + * Copyright (c) 2013, Ismael Gomez-Miguelez . + * This file is part of OSLD-lib (http://https://github.com/ismagom/osld-lib) + * + * OSLD-lib is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * OSLD-lib is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with OSLD-lib. If not, see . + */ + +typedef _Complex float cf_t; + +void phch_cp_prb_ref(cf_t **input, cf_t **output, int offset, int nof_refs, + int nof_prb, bool advance_input); +void phch_cp_prb(cf_t **input, cf_t **output, int nof_prb); +void phch_put_prb_ref_(cf_t **input, cf_t **output, int offset, int nof_refs, + int nof_prb); +void phch_get_prb_ref(cf_t **input, cf_t **output, int offset, int nof_refs, + int nof_prb); diff --git a/lib/phch/src/pbch.c b/lib/phch/src/pbch.c new file mode 100644 index 000000000..2ad4e95c8 --- /dev/null +++ b/lib/phch/src/pbch.c @@ -0,0 +1,375 @@ +/* + * Copyright (c) 2013, Ismael Gomez-Miguelez . + * This file is part of OSLD-lib (http://https://github.com/ismagom/osld-lib) + * + * OSLD-lib is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * OSLD-lib is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. 
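+ *
+ * Note on the blind antenna-port detection in pbch_crc_check() below: the
+ * 16-bit CRC attached to the 24-bit MIB is scrambled with a mask that
+ * depends on the number of transmit ports (all zeros for 1 port, all ones
+ * for 2, alternating 0/1 for 4), so the receiver tries the three masks and
+ * keeps the one for which the CRC checks.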
+ * + * You should have received a copy of the GNU Lesser General Public License + * along with OSLD-lib. If not, see . + */ + +#include +#include +#include +#include +#include +#include + +#include "common.h" +#include "phch/pbch.h" +#include "lte/base.h" +#include "utils/bit.h" +#include "utils/vector.h" +#include "utils/debug.h" + +bool pbch_exists(int nframe, int nslot) { + return (!(nframe % 4) && nslot == 1); +} + +int pbch_cp(cf_t *input, cf_t *output, int nof_prb, lte_cp_t cp, int cell_id, bool put) { + int i; + cf_t *ptr; + if (put) { + ptr = input; + output += nof_prb * RE_X_RB / 2 - 36; + output += GUARD_RE(nof_prb); + } else { + ptr = output; + input += nof_prb * RE_X_RB / 2 - 36; + input += GUARD_RE(nof_prb); + } + + /* symbol 0 & 1 */ + for (i=0;i<2;i++) { + phch_cp_prb_ref(&input, &output, cell_id%3, 4, 6, put); + if (put) { + output += 2*GUARD_RE(nof_prb); + } else { + input += 2*GUARD_RE(nof_prb); + } + } + /* symbols 2 & 3 */ + if (CP_ISNORM(cp)) { + for (i=0;i<2;i++) { + phch_cp_prb(&input, &output, 6); + if (put) { + output += 2*GUARD_RE(nof_prb); + } else { + input += 2*GUARD_RE(nof_prb); + } + } + } else { + phch_cp_prb(&input, &output, 6); + if (put) { + output += 2*GUARD_RE(nof_prb); + } else { + input += 2*GUARD_RE(nof_prb); + } + phch_cp_prb_ref(&input, &output, cell_id%3, 4, 6, put); + } + if (put) { + return input - ptr; + } else { + return output - ptr; + } +} + +/** + * Puts PBCH in slot number 1 + * + * Returns the number of symbols written to slot1_data + * + * 36.211 10.3 section 6.6.4 + */ +int pbch_put(cf_t *pbch, cf_t *slot1_data, int nof_prb, lte_cp_t cp, int cell_id) { + return pbch_cp(pbch, slot1_data, nof_prb, cp, cell_id, true); +} + +/** + * Extracts PBCH from slot number 1 + * + * Returns the number of symbols written to pbch + * + * 36.211 10.3 section 6.6.4 + */ +int pbch_get(cf_t *slot1_data, cf_t *pbch, int nof_prb, lte_cp_t cp, int cell_id) { + return pbch_cp(slot1_data, pbch, nof_prb, cp, cell_id, false); +} + + +/* Checks CRC and blindly obtains the number of ports, which is saved in nof_ports. + * + * The bits buffer size must be at least 40 bytes. + * + * Returns 0 if the data is correct, -1 otherwise + */ +int pbch_crc_check(char *bits, int *nof_ports) { + int i, j; + const char crc_mask[3][16] = { + {0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0}, + {1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1}, + {0,1,0,1,0,1,0,1,0,1,0,1,0,1,0,1} + }; + const int ports[3] = {1, 2, 4}; + char data[40]; + + memcpy(data, bits, 24 * sizeof(char)); + + for (i=0;i<3;i++) { + for (j=0;j<16;j++) { + data[24+j] = (bits[24+j] + crc_mask[i][j]) % 2; + } + if (!crc(0, data, 40, 16, 0x11021, 0)) { + *nof_ports = ports[i]; + return 0; + } + } + *nof_ports = 0; + return -1; +} + + +/** Initializes the PBCH channel receiver */ +int pbch_init(pbch_t *q, int cell_id, lte_cp_t cp) { + int ret = -1; + bzero(q, sizeof(pbch_t)); + q->cell_id = cell_id; + q->cp = cp; + if (modem_table_std(&q->mod, LTE_QPSK, true)) { + goto clean; + } + demod_soft_init(&q->demod); + demod_soft_table_set(&q->demod, &q->mod); + demod_soft_alg_set(&q->demod, APPROX); + if (sequence_pbch(&q->seq_pbch, q->cp, q->cell_id)) { + goto clean; + } + + int poly[3] = {0x6D, 0x4F, 0x57}; + if (viterbi_init(&q->decoder, CONVCODER_37, poly, 40, true)) { + goto clean; + } + int nof_symbols = (CP_ISNORM(q->cp)) ? 
PBCH_RE_CPNORM: PBCH_RE_CPEXT; + + q->pbch_symbols = malloc(sizeof(cf_t) * nof_symbols); + if (!q->pbch_symbols) { + goto clean; + } + q->pbch_llr = malloc(sizeof(float) * nof_symbols * 4 * 2); + if (!q->pbch_llr) { + goto clean; + } + q->temp = malloc(sizeof(float) * nof_symbols * 4 * 2); + if (!q->temp) { + goto clean; + } + q->pbch_rm = malloc(sizeof(float) * 120); + if (!q->pbch_rm) { + goto clean; + } + q->data = malloc(sizeof(char) * 40); + if (!q->data) { + goto clean; + } + + ret = 0; +clean: + if (ret == -1) { + pbch_free(q); + } + return ret; +} + +void pbch_free(pbch_t *q) { + if (q->pbch_symbols) { + free(q->pbch_symbols); + } + if (q->pbch_llr) { + free(q->pbch_llr); + } + if (q->pbch_rm) { + free(q->pbch_rm); + } + if (q->data) { + free(q->data); + } + sequence_free(&q->seq_pbch); + modem_table_free(&q->mod); + viterbi_free(&q->decoder); +} + +/** Unpacks MIB from PBCH message. + * msg buffer must be 24 byte length at least + */ +void pbch_mib_unpack(char *msg, pbch_mib_t *mib) { + int bw, phich_res; + char *buffer; + + bw = 4*msg[0] + 2*msg[1] + msg[2]; + switch(bw) { + case 0: + mib->nof_prb = 6; + break; + case 1: + mib->nof_prb = 15; + break; + default: + mib->nof_prb = (bw-1)*25; + break; + } + if (msg[3]) { + mib->phich_length = EXTENDED; + } else { + mib->phich_length = NORMAL; + } + phich_res = 2*msg[4] + msg[5]; + switch(phich_res) { + case 0: + mib->phich_resources = R_1_6; + break; + case 1: + mib->phich_resources = R_1_2; + break; + case 2: + mib->phich_resources = R_1; + break; + case 3: + mib->phich_resources = R_2; + break; + } + buffer = &msg[6]; + mib->sfn = bit_unpack(&buffer, 8); +} + +void pbch_mib_fprint(FILE *stream, pbch_mib_t *mib) { + printf(" - Nof ports: %d\n", mib->nof_ports); + printf(" - PRB: %d\n", mib->nof_prb); + printf(" - PHICH Length: %s\n", mib->phich_length==EXTENDED?"Extended":"Normal"); + printf(" - PHICH Resources: "); + switch(mib->phich_resources) { + case R_1_6: + printf("1/6"); + break; + case R_1_2: + printf("1/2"); + break; + case R_1: + printf("1"); + break; + case R_2: + printf("2"); + break; + } + printf("\n"); + printf(" - SFN: %d\n", mib->sfn); +} + +void pbch_decode_reset(pbch_t *q) { + q->frame_idx = 0; +} + +int pbch_decode_frame(pbch_t *q, pbch_mib_t *mib, int src, int dst, int n, int nof_bits) { + int j; + + memcpy(&q->temp[dst*nof_bits], &q->pbch_llr[src*nof_bits], n*nof_bits*sizeof(float)); + + /* descramble */ + scrambling_float_offset(&q->seq_pbch, &q->temp[dst*nof_bits], dst*nof_bits, n*nof_bits); + + for (j=0;jtemp[j] = RX_NULL; + } + for (j=(dst+n)*nof_bits;j<4*nof_bits;j++) { + q->temp[j] = RX_NULL; + } + + /* unrate matching */ + rm_conv_rx(q->temp, q->pbch_rm, 4*nof_bits, 120); + + /* decode */ + viterbi_decode(&q->decoder, q->pbch_rm, q->data); + + /* check crc and get nof ports */ + if (pbch_crc_check(q->data, &mib->nof_ports)) { + + return 0; + } else { + + printf("BCH Decoded Correctly.\n"); + + /* unpack MIB */ + pbch_mib_unpack(q->data, mib); + + mib->sfn += dst-src; + + pbch_mib_fprint(stdout, mib); + + return 1; + } +} + +/* Decodes the PBCH channel + * + * The PBCH spans in 40 ms. This function is called every 10 ms. It tries to decode the MIB + * given the symbols of the slot #1 of each radio frame. Successive calls will use more frames + * to help the decoding process. 
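+ *
+ * Typical usage (illustrative sketch, error handling omitted; one call per
+ * received radio frame):
+ *
+ *   pbch_t pbch;
+ *   pbch_mib_t mib;
+ *   lte_cp_t cp = ...;          // normal or extended CP
+ *   pbch_init(&pbch, cell_id, cp);
+ *   while (pbch_decode(&pbch, slot1_symbols, &mib, nof_prb, ebno) != 1) {
+ *     ...                       // fill slot1_symbols from the next frame
+ *   }
+ *   pbch_free(&pbch);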
+ * + * Returns 1 if successfully decoded MIB, 0 if not and -1 on error + */ +int pbch_decode(pbch_t *q, cf_t *slot1_symbols, pbch_mib_t *mib, int nof_prb, float ebno) { + int src, dst, res, nb; + + int nof_symbols = (CP_ISNORM(q->cp)) ? PBCH_RE_CPNORM: PBCH_RE_CPEXT; + int nof_bits = 2 * nof_symbols; + + /* extract symbols */ + if (nof_symbols != pbch_get(slot1_symbols, q->pbch_symbols, nof_prb, + q->cp, q->cell_id)) { + fprintf(stderr, "There was an error getting the PBCH symbols\n"); + return -1; + } + + /* demodulate symbols */ + demod_soft_sigma_set(&q->demod, ebno); + demod_soft_demodulate(&q->demod, q->pbch_symbols, + &q->pbch_llr[nof_bits * q->frame_idx], nof_symbols); + + q->frame_idx++; + + INFO("PBCH: %d frames in buffer\n", q->frame_idx); + + /* We don't know where the 40 ms begin, so we try all combinations. E.g. if we received + * 4 frames, try 1,2,3,4 individually, 12, 23, 34 in pairs, 123, 234 and finally 1234. + * We know they are ordered. + */ + res = 0; + for (nb=0;nbframe_idx && !res;nb++) { + for (dst=0;(dst<4-nb) && !res;dst++) { + for (src=0;srcframe_idx && !res;src++) { + DEBUG("Trying %d blocks at offset %d as subframe mod4 number %d\n", nb+1, src, dst); + res = pbch_decode_frame(q, mib, src, dst, nb+1, nof_bits); + } + } + } + + if (res) { + q->frame_idx = 0; + return 1; + } else { + /* make room for the next packet of radio frame symbols */ + if (q->frame_idx == 4) { + memcpy(&q->pbch_llr[nof_bits], q->pbch_llr, nof_bits * 3 * sizeof(float)); + q->frame_idx = 3; + } + return 0; + } + +} diff --git a/lib/ratematching/src/rm_conv.c b/lib/ratematching/src/rm_conv.c new file mode 100644 index 000000000..af4a5dbe5 --- /dev/null +++ b/lib/ratematching/src/rm_conv.c @@ -0,0 +1,116 @@ +/* + * Copyright (c) 2013, Ismael Gomez-Miguelez . + * This file is part of OSLD-lib (http://https://github.com/ismagom/osld-lib) + * + * OSLD-lib is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * OSLD-lib is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with OSLD-lib. If not, see . + */ + +#include +#include "ratematching/rm_conv.h" + +#define NCOLS 32 +#define NROWS_MAX NCOLS +#define RATE 3 + +unsigned char RM_PERM_CC[NCOLS] = + { 1, 17, 9, 25, 5, 21, 13, 29, 3, 19, 11, 27, 7, 23, 15, 31, 0, 16, 8, + 24, 4, 20, 12, 28, 2, 18, 10, 26, 6, 22, 14, 30 }; +unsigned char RM_PERM_CC_INV[NCOLS] = { 16, 0, 24, 8, 20, 4, 28, 12, 18, 2, 26, + 10, 22, 6, 30, 14, 17, 1, 25, 9, 21, 5, 29, 13, 19, 3, 27, 11, 23, 7, + 31, 15 }; + + +/* Undoes Convolutional Code Rate Matching. + * 3GPP TS 36.212 v10.1.0 section 5.1.4.2 + */ +int rm_conv_rx(float *input, float *output, int in_len, int out_len) { + + int nrows, ndummy, K_p; + int i, j, k; + int d_i, d_j; + + float tmp[RATE * NCOLS * NROWS_MAX]; + + nrows = (int) (out_len / RATE - 1) / NCOLS + 1; + if (nrows > NROWS_MAX) { + fprintf(stderr, "Output too large. 
Max output length is %d\n", RATE * NCOLS * NROWS_MAX); + return -1; + } + K_p = nrows * NCOLS; + + ndummy = K_p - out_len / RATE; + if (ndummy < 0) { + ndummy = 0; + } + + for (i = 0; i < RATE * K_p; i++) { + tmp[i] = RX_NULL; + } + + /* Undo bit collection. Account for dummy bits */ + k = 0; + j = 0; + while (k < in_len) { + d_i = (j % K_p) / nrows; + d_j = (j % K_p) % nrows; + + if (d_j * NCOLS + RM_PERM_CC[d_i] >= ndummy) { + if (tmp[j] == RX_NULL) { + tmp[j] = input[k]; + } else if (input[k] != RX_NULL) { + tmp[j] += input[k]; /* soft combine LLRs */ + } + k++; + } + j++; + if (j == RATE * K_p) { + j = 0; + } + } + + /* interleaving and bit selection */ + for (i = 0; i < out_len / RATE; i++) { + d_i = (i + ndummy) / NCOLS; + d_j = (i + ndummy) % NCOLS; + for (j = 0; j < RATE; j++) { + output[i * RATE + j] = tmp[K_p * j + + RM_PERM_CC_INV[d_j] * nrows + d_i]; + } + } + return 0; +} + +/** High-level API */ + +int rm_conv_initialize(rm_conv_hl* h) { + + return 0; +} + +/** This function can be called in a subframe (1ms) basis */ +int rm_conv_work(rm_conv_hl* hl) { + if (hl->init.direction) { + //rm_conv_tx(hl->input, hl->output, hl->in_len, hl->ctrl_in.S); + *(hl->out_len) = hl->ctrl_in.S; + } else { + rm_conv_rx(hl->input, hl->output, hl->in_len, hl->ctrl_in.E); + *(hl->out_len) = hl->ctrl_in.E; + } + return 0; +} + +int rm_conv_stop(rm_conv_hl* hl) { + return 0; +} + diff --git a/lib/resampling/src/interp.c b/lib/resampling/src/interp.c new file mode 100644 index 000000000..657c29e4d --- /dev/null +++ b/lib/resampling/src/interp.c @@ -0,0 +1,59 @@ +/* + * Copyright (c) 2013, Ismael Gomez-Miguelez . + * This file is part of OSLD-lib (http://https://github.com/ismagom/osld-lib) + * + * OSLD-lib is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * OSLD-lib is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with OSLD-lib. If not, see . + */ +#include +#include +#include "resampling/interp.h" +#include "utils/debug.h" + +/* Performs 1st order linear interpolation with out-of-bound interpolation */ +void interp_linear_offset(cf_t *input, cf_t *output, int M, int len, int off_st, int off_end) { + int i, j; + float mag0, mag1, arg0, arg1, mag, arg; + + for (i=0;i. + * This file is part of OSLD-lib (http://https://github.com/ismagom/osld-lib) + * + * OSLD-lib is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * OSLD-lib is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with OSLD-lib. If not, see . 
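+ *
+ * The routines below apply the scrambling sequence either to hard bits
+ * (modulo-2 addition) or to soft bits, where scrambling a float value d[i]
+ * with sequence bit c[i] reduces to a sign flip, d[i] * (1 - 2*c[i]): the
+ * value is negated wherever c[i] == 1.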
+ */ + +#include +#include +#include +#include "scrambling/scrambling.h" + +/** + * @ingroup Soft-bit Scrambling + * Scrambles the input softbit-sequence (floats) with the scrambling + * sequence (32-bit integers). + * + */ +void scrambling_float(sequence_t *s, float *data) { + scrambling_float_offset(s, data, 0, s->len); +} + +int scrambling_float_offset(sequence_t *s, float *data, int offset, int len) { + int i; + if (len + offset > s->len) { + return -1; + } + for (i = 0; i < len; i++) { + data[i] = data[i]*(1-2*s->c[i+offset]); + } + return 0; +} + +/** + * @ingroup Bit Scrambling + * Directly scrambles the input bit-sequence (char) with the scrambling + * sequence. + */ +void scrambling_bit(sequence_t *s, char *data) { + int i; + + for (i = 0; i < s->len; i++) { + data[i] = (data[i] + s->c[i]) % 2; + } +} + +/** High-level API */ + +int compute_sequences(scrambling_hl* h) { + + switch (h->init.channel) { + case PBCH: + return sequence_pbch(&h->obj.seq[0], h->init.nof_symbols == CPNORM_NSYMB, + h->init.cell_id); + case PDSCH: + case PCFICH: + case PDCCH: + case PMCH: + case PUCCH: + fprintf(stderr, "Not implemented\n"); + return -1; + default: + fprintf(stderr, "Invalid channel %d\n", h->init.channel); + return -1; + } +} + +int scrambling_initialize(scrambling_hl* h) { + + bzero(&h->obj, sizeof(scrambling_t)); + + return compute_sequences(h); +} + +/** This function can be called in a subframe (1ms) basis for LTE */ +int scrambling_work(scrambling_hl* hl) { + int sf; + if (hl->init.channel == PBCH) { + sf = 0; + } else { + sf = hl->ctrl_in.subframe; + } + sequence_t *seq = &hl->obj.seq[sf]; + + if (hl->init.hard) { + memcpy(hl->output, hl->input, sizeof(char) * hl->in_len); + scrambling_bit(seq, hl->output); + } else { + memcpy(hl->output, hl->input, sizeof(float) * hl->in_len); + scrambling_float(seq, hl->output); + } + *(hl->out_len) = hl->in_len; + return 0; +} + +int scrambling_stop(scrambling_hl* hl) { + int i; + for (i=0;iobj.seq[i]); + } + return 0; +} + diff --git a/lib/sync/src/cp.c b/lib/sync/src/cp.c new file mode 100644 index 000000000..567267290 --- /dev/null +++ b/lib/sync/src/cp.c @@ -0,0 +1,5 @@ + + +/** TODO: Cyclic-prefix based synchronization + * + */ diff --git a/lib/sync/src/find_sss.c b/lib/sync/src/find_sss.c new file mode 100644 index 000000000..8d7df4b80 --- /dev/null +++ b/lib/sync/src/find_sss.c @@ -0,0 +1,108 @@ +/* + * Copyright (c) 2013, Ismael Gomez-Miguelez . + * This file is part of OSLD-lib (http://https://github.com/ismagom/osld-lib) + * + * OSLD-lib is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * OSLD-lib is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with OSLD-lib. If not, see . 
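+ *
+ * The functions below estimate m0 and m1 by correlating the processed SSS
+ * symbol against all N_SSS candidate sequences (corr_all_zs) and picking
+ * the index with the largest correlation magnitude (vec_max).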
+ */ + +#include + +#include "utils/vector.h" +#include "sync/sss.h" + +cf_t corr_sz(cf_t *z, cf_t *s) { + cf_t sum; + cf_t zsprod[32]; + vec_dot_prod(z, s, zsprod, N_SSS - 1); + sum = sum_c(zsprod, N_SSS - 1); + + return sum; +} +void corr_all_zs(cf_t *z, cf_t s[32][32], cf_t *output) { + int m; + for (m = 0; m < N_SSS; m++) { + output[m] = corr_sz(z, s[m]); + } +} + + +/* Assumes input points to the beginning of the SSS symbol. The SSS symbol start is + * given by SSS_SYMBOL_ST() macro in sss.h. + * Estimates the m0 and m1 values and saves in m0_value and m1_value + * the resulted correlation (higher is more likely) + * + * + * Source: "SSS Detection Method for Initial Cell Search in 3GPP LTE FDD/TDD Dual Mode Receiver" + * Jung-In Kim, Jung-Su Han, Hee-Jin Roh and Hyung-Jin Choi + + * + */ +void sss_synch_m0m1(sss_synch_t *q, cf_t *input, int *m0, float *m0_value, + int *m1, float *m1_value) { + + /* This is aprox 3-4 kbytes of stack. Consider moving to sss_synch_t?? */ + cf_t zdelay[N_SSS+1],zconj[N_SSS+1],zprod[N_SSS+1]; + cf_t y[2][N_SSS+1], z[N_SSS+1], tmp[N_SSS+1]; + float tmp_real[N_SSS+1]; + cf_t input_fft[SSS_DFT_LEN]; + + int i; + + dft_run_c2c(&q->dftp_input, input, input_fft); + + for (i = 0; i < N_SSS; i++) { + y[0][i] = input_fft[SSS_POS_SYMBOL + 2 * i]; + y[1][i] = input_fft[SSS_POS_SYMBOL + 2 * i + 1]; + } + + vec_dot_prod(y[0], q->fc_tables.c[0], z, N_SSS); + memcpy(zdelay, &z[1], (N_SSS - 1) * sizeof(cf_t)); + vec_conj(z, zconj, N_SSS - 1); + vec_dot_prod(zdelay, zconj, zprod, N_SSS - 1); + + corr_all_zs(zprod, q->fc_tables.s, tmp); + vec_abs(tmp, tmp_real, N_SSS); + vec_max(tmp_real, m0_value, m0, N_SSS); + + vec_dot_prod(y[1], q->fc_tables.c[1], tmp, N_SSS); + vec_dot_prod(tmp, q->fc_tables.z1[*m0], z, N_SSS); + memcpy(zdelay, &z[1], (N_SSS - 1) * sizeof(cf_t)); + vec_conj(z, zconj, N_SSS - 1); + vec_dot_prod(zdelay, zconj, zprod, N_SSS - 1); + + corr_all_zs(zprod, q->fc_tables.s, tmp); + vec_abs(tmp, tmp_real, N_SSS); + vec_max(tmp_real, m1_value, m1, N_SSS); + +} + +void convert_tables(struct fc_tables *fc_tables, struct sss_tables *in) { + int i, j; + bzero(fc_tables, sizeof(struct fc_tables)); + for (i = 0; i < N_SSS; i++) { + for (j = 0; j < N_SSS; j++) { + __real__ fc_tables->z1[i][j] = (float) in->z1[i][j]; + } + } + for (i = 0; i < N_SSS; i++) { + for (j = 0; j < N_SSS - 1; j++) { + __real__ fc_tables->s[i][j] = (float) in->s[i][j + 1] * in->s[i][j]; + } + } + for (i = 0; i < 2; i++) { + for (j = 0; j < N_SSS; j++) { + __real__ fc_tables->c[i][j] = (float) in->c[i][j]; + } + } +} diff --git a/lib/sync/src/gen_sss.c b/lib/sync/src/gen_sss.c new file mode 100644 index 000000000..ca9c8da3c --- /dev/null +++ b/lib/sync/src/gen_sss.c @@ -0,0 +1,154 @@ +/* + * Copyright (c) 2013, Ismael Gomez-Miguelez . + * This file is part of OSLD-lib (http://https://github.com/ismagom/osld-lib) + * + * OSLD-lib is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * OSLD-lib is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with OSLD-lib. If not, see . 
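+ *
+ * Worked example of the m0/m1 mapping implemented in generate_m0m1() below
+ * (3GPP TS 36.211 Sec. 6.11.2.1, with N_SSS = 31): for N_id_1 = 167,
+ * q' = 167/30 = 5, q = (167 + 15)/30 = 6, m' = 167 + 21 = 188, giving
+ * m0 = 188 mod 31 = 2 and m1 = (2 + 6 + 1) mod 31 = 9.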
+ */ + +#include + +#include "sync/sss.h" + +/** + * @brief Function documentation: initSSStables() + * This function generates the scrambling sequences required for generation of + * SSS sequence according with 3GPP TS 36.211 version 10.5.0 Release 10. + */ +void generate_zsc_tilde(int *z_tilde, int *s_tilde, int *c_tilde) { + + int i; + int x[N_SSS]; + bzero(x, sizeof(int) * N_SSS); + x[4] = 1; + + for (i = 0; i < 26; i++) + x[i + 5] = (x[i + 2] + x[i]) % 2; + for (i = 0; i < N_SSS; i++) + s_tilde[i] = 1 - 2 * x[i]; + + for (i = 0; i < 26; i++) + x[i + 5] = (x[i + 3] + x[i]) % 2; + for (i = 0; i < N_SSS; i++) + c_tilde[i] = 1 - 2 * x[i]; + + for (i = 0; i < 26; i++) + x[i + 5] = (x[i + 4] + x[i + 2] + x[i + 1] + x[i]) % 2; + for (i = 0; i < N_SSS; i++) + z_tilde[i] = 1 - 2 * x[i]; +} + +void generate_m0m1(int N_id_1, int *m0, int *m1) { + int q_prime = N_id_1 / (N_SSS - 1); + int q = (N_id_1 + (q_prime * (q_prime + 1) / 2)) / (N_SSS - 1); + int m_prime = N_id_1 + (q * (q + 1) / 2); + *m0 = m_prime % N_SSS; + *m1 = (*m0 + m_prime / N_SSS + 1) % N_SSS; +} + + +/* table[m0][m1-1]=N_id_1 */ +void generate_N_id_1_table(int table[30][30]) { + int m0, m1; + int N_id_1; + for (N_id_1=0;N_id_1<168;N_id_1++) { + generate_m0m1(N_id_1, &m0, &m1); + table[m0][m1-1] = N_id_1; + } +} + + +void generate_s(int *s, int *s_tilde, int m0_m1) { + int i; + for (i = 0; i < N_SSS; i++) { + s[i] = s_tilde[(i + m0_m1) % N_SSS]; + } +} + +void generate_s_all(int s[N_SSS][N_SSS], int *s_tilde) { + int i; + for (i = 0; i < N_SSS; i++) { + generate_s(s[i], s_tilde, i); + } +} + +void generate_c(int *c, int *c_tilde, int N_id_2, int is_c0) { + int i; + for (i = 0; i < N_SSS; i++) { + c[i] = c_tilde[(i + N_id_2 + (is_c0 > 0 ? 3 : 0)) % N_SSS]; + } +} + +void generate_z(int *z, int *z_tilde, int m0_m1) { + int i; + for (i = 0; i < N_SSS; i++) { + z[i] = z_tilde[(i + (m0_m1 % 8)) % N_SSS]; + } +} + +void generate_z_all(int z[N_SSS][N_SSS], int *z_tilde) { + int i; + for (i = 0; i < N_SSS; i++) { + generate_z(z[i], z_tilde, i); + } +} + +void generate_sss_all_tables(struct sss_tables *tables, int N_id_2) { + int i; + int s_t[N_SSS], c_t[N_SSS], z_t[N_SSS]; + + generate_zsc_tilde(z_t, s_t, c_t); + generate_s_all(tables->s, s_t); + generate_z_all(tables->z1, z_t); + for (i = 0; i < 2; i++) { + generate_c(tables->c[i], c_t, N_id_2, i); + } + tables->N_id_2 = N_id_2; +} + +void generate_sss(float *signal, int cell_id) { + + int i; + int id1 = cell_id / 3; + int id2 = cell_id % 3; + int m0; + int m1; + int s_t[N_SSS], c_t[N_SSS], z_t[N_SSS]; + int s0[N_SSS], s1[N_SSS], c0[N_SSS], c1[N_SSS], z1_0[N_SSS], z1_1[N_SSS]; + + generate_m0m1(id1, &m0, &m1); + generate_zsc_tilde(z_t, s_t, c_t); + + generate_s(s0, s_t, m0); + generate_s(s1, s_t, m1); + + generate_c(c0, c_t, id2, 0); + generate_c(c1, c_t, id2, 1); + + generate_z(z1_0, z_t, m0); + generate_z(z1_1, z_t, m1); + + for (i = 0; i < N_SSS; i++) { + /** Even Resource Elements: Sub-frame 0*/ + signal[2 * i] = (float) (s0[i] * c0[i]); + /** Odd Resource Elements: Sub-frame 0*/ + signal[2 * i + 1] = (float) (s1[i] * c1[i] * z1_0[i]); + } + for (i = 0; i < N_SSS; i++) { + /** Even Resource Elements: Sub-frame 5*/ + signal[2 * i + N_SSS * 2] = (float) (s1[i] * c0[i]); + /** Odd Resource Elements: Sub-frame 5*/ + signal[2 * i + 1 + N_SSS * 2] = (float) (s0[i] * c1[i] * z1_1[i]); + } +} + diff --git a/lib/sync/src/pss.c b/lib/sync/src/pss.c new file mode 100644 index 000000000..3bd21a382 --- /dev/null +++ b/lib/sync/src/pss.c @@ -0,0 +1,395 @@ +/* + * Copyright (c) 2013, Ismael 
Gomez-Miguelez . + * This file is part of OSLD-lib (http://https://github.com/ismagom/osld-lib) + * + * OSLD-lib is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * OSLD-lib is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with OSLD-lib. If not, see . + */ + +#include +#include +#include +#include +#include + +#include "sync/pss.h" +#include "utils/dft.h" +#include "utils/vector.h" +#include "utils/convolution.h" + +#define NOT_SYNC 0xF0F0F0F0 + +/* Initializes the object. subframe_size is the size, in samples, of the 1ms subframe + * + */ +int pss_synch_init(pss_synch_t *q, int frame_size) { + int ret = -1; + bzero(q, sizeof(pss_synch_t)); + + q->pss_signal_freq = vec_malloc((PSS_LEN_FREQ+frame_size) * sizeof(cf_t)); + if (!q->pss_signal_freq) { + fprintf(stderr, "Error allocating memory\n"); + goto clean_and_exit; + } + q->conv_abs = vec_malloc((PSS_LEN_FREQ+frame_size) * sizeof(float)); + if (!q->conv_abs) { + fprintf(stderr, "Error allocating memory\n"); + goto clean_and_exit; + } + q->tmp_input = vec_malloc((PSS_LEN_FREQ+frame_size) * sizeof(cf_t)); + if (!q->tmp_input) { + fprintf(stderr, "Error allocating memory\n"); + goto clean_and_exit; + } + q->frame_buffer = vec_malloc(4*frame_size * sizeof(cf_t)); + if (!q->frame_buffer) { + fprintf(stderr, "Error allocating memory\n"); + goto clean_and_exit; + } + q->conv_output = vec_malloc((PSS_LEN_FREQ+frame_size) * sizeof(cf_t)); + if (!q->conv_output) { + fprintf(stderr, "Error allocating memory\n"); + goto clean_and_exit; + } + +#ifdef CONVOLUTION_FFT + if (conv_fft_cc_init(&q->conv_fft, frame_size, PSS_LEN_FREQ)) { + fprintf(stderr, "Error initiating convolution FFT\n"); + goto clean_and_exit; + } +#endif + + q->correlation_threshold = DEFAULT_CORRELATION_TH; + q->nosync_timeout_frames = DEFAULT_NOSYNC_TIMEOUT; + q->cfo_auto = true; + q->N_id_2 = -1; + q->frame_size = frame_size; + q->frame_start_idx = NOT_SYNC; + q->fb_wp = 0; + + ret = 0; +clean_and_exit: + if (ret == -1) { + pss_synch_free(q); + } + return ret; +} + +void pss_synch_free(pss_synch_t *q) { + if (q->pss_signal_freq) { + free(q->pss_signal_freq); + } + if (q->conv_abs) { + free(q->conv_abs); + } + if (q->tmp_input) { + free(q->tmp_input); + } + if (q->frame_buffer) { + free(q->frame_buffer); + } + if (q->conv_output) { + free(q->conv_output); + } + +#ifdef CONVOLUTION_FFT + conv_fft_cc_free(&q->conv_fft); +#endif + + bzero(q, sizeof(pss_synch_t)); +} + +/** + * This function calculates the Zadoff-Chu sequence. + * @param signal Output array. + * @param direction 0 for tx, 1 for rx + */ +int pss_generate(cf_t *signal, int direction, int N_id_2) { + int i; + float arg; + const float root_value[] = {25.0,29.0,34.0}; + int root_idx; + + int sign = direction ? 
1 : -1; + + if (N_id_2 < 0 || N_id_2 > 2) { + fprintf(stderr, "Invalid N_id_2 %d\n", N_id_2); + return -1; + } + + root_idx = N_id_2; + + for (i = 0; i < PSS_LEN / 2; i++) { + arg = (float) sign * M_PI * root_value[root_idx] + * ((float) i * ((float) i + 1.0)) / 63.0; + __real__ signal[i] = cos(arg); + __imag__ signal[i] = sin(arg); + } + for (i = PSS_LEN / 2; i < PSS_LEN; i++) { + arg = (float) sign * M_PI * root_value[root_idx] + * (((float) i + 2.0) * ((float) i + 1.0)) / 63.0; + __real__ signal[i] = cos(arg); + __imag__ signal[i] = sin(arg); + } + return 0; +} + + + +/** Sets the current N_id_2 value. Initializes the object for this PSS sequence + * Returns -1 on error, 0 otherwise + */ +int pss_synch_set_N_id_2(pss_synch_t *q, int N_id_2) { + q->N_id_2 = N_id_2; + + dft_plan_t plan; + cf_t pss_signal_pad[PSS_LEN_FREQ]; + cf_t pss_signal_time[PSS_LEN]; + + if (N_id_2 < 0 || N_id_2 > 2) { + fprintf(stderr, "Invalid N_id_2 %d\n", N_id_2); + return -1; + } + + pss_generate(pss_signal_time, 0, N_id_2); + + memset(pss_signal_pad, 0, PSS_LEN_FREQ * sizeof(cf_t)); + memset(q->pss_signal_freq, 0, PSS_LEN_FREQ * sizeof(cf_t)); + memcpy(&pss_signal_pad[33], pss_signal_time, PSS_LEN * sizeof(cf_t)); + + if (dft_plan(PSS_LEN_FREQ - 1, COMPLEX_2_COMPLEX, BACKWARD, &plan)) { + return -1; + } + plan.options = DFT_MIRROR_PRE | DFT_DC_OFFSET; + + dft_run_c2c(&plan, pss_signal_pad, q->pss_signal_freq); + + vec_mult_c_r(q->pss_signal_freq, pss_signal_pad, + (float) 1 / (PSS_LEN_FREQ - 1), PSS_LEN_FREQ); + + vec_conj(pss_signal_pad, q->pss_signal_freq, PSS_LEN_FREQ); + + q->N_id_2 = N_id_2; + + dft_plan_free(&plan); + + return 0; +} + +/** Returns the index of the PSS correlation peak in a subframe. + * The frame starts at corr_peak_pos-subframe_size/2. + * The value of the correlation is stored in corr_peak_value. + * + * Input buffer must be subframe_size long. + */ +int pss_synch_find_pss(pss_synch_t *q, cf_t *input, float *corr_peak_value, float *corr_mean_value) { + int corr_peak_pos; + int conv_output_len; + + memset(&q->pss_signal_freq[PSS_LEN_FREQ], 0, q->frame_size * sizeof(cf_t)); + memcpy(q->tmp_input, input, q->frame_size * sizeof(cf_t)); + memset(&q->tmp_input[q->frame_size], 0, PSS_LEN_FREQ * sizeof(cf_t)); + +#ifdef CONVOLUTION_FFT + conv_output_len = conv_fft_cc_run(&q->conv_fft, q->tmp_input, q->pss_signal_freq, q->conv_output); +#else + conv_output_len = conv_cc(input, q->pss_signal_freq, q->conv_output, q->frame_size, PSS_LEN_FREQ); +#endif + + vec_abs(q->conv_output, q->conv_abs, conv_output_len); + vec_max(q->conv_abs, corr_peak_value, &corr_peak_pos, conv_output_len); + if (corr_mean_value) { + *corr_mean_value = sum_r(q->conv_abs, conv_output_len) / conv_output_len; + } + + return corr_peak_pos; +} + +/* Returns the CFO estimation given a PSS received sequence + * + * Source: An Efficient CFO Estimation Algorithm for the Downlink of 3GPP-LTE + * Feng Wang and Yu Zhu + */ +float pss_synch_cfo_compute(pss_synch_t* q, cf_t *pss_recv) { + cf_t y0, y1, yr; + cf_t y[PSS_LEN_FREQ-1]; + + vec_dot_prod_u(q->pss_signal_freq, pss_recv, y, PSS_LEN_FREQ - 1); + + y0 = sum_c(y, (PSS_LEN_FREQ - 1)/2); + y1 = sum_c(&y[(PSS_LEN_FREQ - 1)/2], (PSS_LEN_FREQ - 1)/2); + yr = conjf(y0) * y1; + + return atan2f(__imag__ yr, __real__ yr) / M_PI; +} + + + + + + + + + + +/** This function is designed to be called periodically on a subframe basis. + * The function finds the PSS correlation peak and computes (does not adjust) CFO automatically as defined by + * pss_synch_set_cfo_mode(). 
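+ *
+ * Minimal usage sketch (illustrative, error handling omitted):
+ *
+ *   pss_synch_t pss;
+ *   pss_synch_init(&pss, frame_size);
+ *   pss_synch_set_N_id_2(&pss, N_id_2);
+ *   // then, once per 1 ms subframe of input samples:
+ *   int n = pss_synch_frame(&pss, input, output, frame_size);
+ *   if (n > 0) {
+ *     // output now starts at the detected subframe boundary
+ *   }
+ *   // ... when done:
+ *   pss_synch_free(&pss);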
+ * + * If the PSS sequence is not found, returns 0 writes nothing to the output buffer. + * If the PSS sequence is found, aligns the beginning of the subframe to the output buffer and returns the number of samples + * written to the output buffer. + * If synchronized, subsequent calls to this function align the input buffer to the subframe beginning. + */ +int pss_synch_frame(pss_synch_t *q, cf_t *input, cf_t *output, int nsamples) { + int max_idx, tmp_start_idx; + int retval; + float max_value; + + if (nsamples != q->frame_size) { + fprintf(stderr, "Configured for frame size %d but got %d samples\n", + q->frame_size, nsamples); + return -1; + } + + if (q->N_id_2 < 0) { + fprintf(stderr, + "N_id_2 must be configured before calling pss_synch()\n"); + return -1; + } + + max_idx = pss_synch_find_pss(q, input, &max_value, NULL); + if (max_value > q->correlation_threshold) { + tmp_start_idx = max_idx - nsamples / 2; + if (q->frame_start_idx != tmp_start_idx) { + printf("Re-synchronizing: new index is %d, old was %d\n", + tmp_start_idx, q->frame_start_idx); + } + q->frame_start_idx = tmp_start_idx; + } else { + if (q->nosync_timeout_frames > 0) { + q->nof_nosync_frames++; + if (q->nof_nosync_frames >= q->nosync_timeout_frames) { + q->frame_start_idx = NOT_SYNC; + } + } + } + + if (q->frame_start_idx == NOT_SYNC) { + + memcpy(q->frame_buffer, input, nsamples * sizeof(cf_t)); + retval = 0; + + } else if (q->frame_start_idx > 0) { + + if (q->fb_wp) { + memcpy(&q->frame_buffer[(nsamples - q->frame_start_idx)], input, + q->frame_start_idx * sizeof(cf_t)); + memcpy(output, q->frame_buffer, nsamples * sizeof(cf_t)); + retval = nsamples; + } else { + retval = 0; + } + memcpy(q->frame_buffer, &input[q->frame_start_idx], + (nsamples - q->frame_start_idx) * sizeof(cf_t)); + q->fb_wp = 1; + + } else { + + memcpy(output, &q->frame_buffer[nsamples + q->frame_start_idx], + (-q->frame_start_idx) * sizeof(cf_t)); + memcpy(&output[-q->frame_start_idx], input, + (nsamples + q->frame_start_idx) * sizeof(cf_t)); + memcpy(&q->frame_buffer[nsamples + q->frame_start_idx], + &input[nsamples + q->frame_start_idx], + (-q->frame_start_idx) * sizeof(cf_t)); + retval = nsamples; + } + + if (q->frame_start_idx != NOT_SYNC && q->cfo_auto && retval) { + q->current_cfo = pss_synch_cfo_compute(q, &output[q->frame_size/2 - PSS_LEN_FREQ + 1]); + } + + return retval; +} + + +void pss_synch_set_timeout(pss_synch_t *q, int nof_frames) { + q->nosync_timeout_frames = nof_frames; +} + +void pss_synch_set_threshold(pss_synch_t *q, float threshold) { + q->correlation_threshold = threshold; +} + +void pss_synch_set_cfo_mode(pss_synch_t *q, bool cfo_auto) { + q->cfo_auto = cfo_auto; +} + +float pss_synch_get_cfo(pss_synch_t *q) { + return q->current_cfo; +} + +int pss_synch_get_frame_start_idx(pss_synch_t *q) { + return q->frame_start_idx; +} + + + + + + +/** High-level API */ + + + +int pss_synch_initialize(pss_synch_hl* h) { + int fs = h->init.frame_size; + if (!fs) { + fs = DEFAULT_FRAME_SIZE; + } + if (pss_synch_init(&h->obj, fs)) { + return -1; + } + if (h->init.unsync_nof_pkts) { + pss_synch_set_timeout(&h->obj, h->init.unsync_nof_pkts); + } + + pss_synch_set_N_id_2(&h->obj, h->init.N_id_2); + if (h->init.do_cfo) { + pss_synch_set_cfo_mode(&h->obj, true); + } else { + pss_synch_set_cfo_mode(&h->obj, false); + } + return 0; +} + +int pss_synch_work(pss_synch_hl* hl) { + + if (hl->ctrl_in.correlation_threshold) { + pss_synch_set_threshold(&hl->obj, hl->ctrl_in.correlation_threshold); + } + + *hl->out_len = pss_synch_frame(&hl->obj, 
hl->input, hl->output, hl->in_len); + if (*hl->out_len < 0) { + return -1; + } + + return 0; +} + +int pss_synch_stop(pss_synch_hl* hl) { + pss_synch_free(&hl->obj); + return 0; +} + diff --git a/lib/sync/src/sfo.c b/lib/sync/src/sfo.c new file mode 100644 index 000000000..70f576272 --- /dev/null +++ b/lib/sync/src/sfo.c @@ -0,0 +1,31 @@ +/* + * Copyright (c) 2013, Ismael Gomez-Miguelez . + * This file is part of OSLD-lib (http://https://github.com/ismagom/osld-lib) + * + * OSLD-lib is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * OSLD-lib is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with OSLD-lib. If not, see . + */ + +#include "sync/sfo.h" + +/* Estimate SFO based on the array of time estimates t0 + * of length len. The parameter period is the time between t0 samples + */ +float sfo_estimate(int *t0, int len, float period) { + int i; + float sfo=0.0; + for (i=1;i. + * This file is part of OSLD-lib (http://https://github.com/ismagom/osld-lib) + * + * OSLD-lib is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * OSLD-lib is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with OSLD-lib. If not, see . + */ + +#include +#include +#include +#include + +#include "sync/sss.h" +#include "utils/dft.h" +#include "utils/convolution.h" + +void generate_sss_all_tables(struct sss_tables *tables, int N_id_2); +void convert_tables(struct fc_tables *fc_tables, struct sss_tables *in); +void generate_N_id_1_table(int table[30][30]); + +int sss_synch_init(sss_synch_t *q) { + bzero(q, sizeof(sss_synch_t)); + + if (dft_plan(SSS_DFT_LEN, COMPLEX_2_COMPLEX, FORWARD, &q->dftp_input)) { + return -1; + } + generate_N_id_1_table(q->N_id_1_table); + q->dftp_input.options = DFT_MIRROR_POS | DFT_DC_OFFSET; + return 0; +} + +void sss_synch_free(sss_synch_t *q) { + dft_plan_free(&q->dftp_input); + bzero(q, sizeof(sss_synch_t)); +} + +/** Initializes the SSS sequences for the given N_id_2 */ +int sss_synch_set_N_id_2(sss_synch_t *q, int N_id_2) { + if (N_id_2 < 0 || N_id_2 > 2) { + fprintf(stderr, "Invalid N_id_2 %d\n", N_id_2); + return -1; + } + + struct sss_tables sss_tables; + generate_sss_all_tables(&sss_tables,N_id_2); + convert_tables(&q->fc_tables, &sss_tables); + + return 0; +} + + +/* In this function, input points to the beginning of the subframe. Saves result in subframe_idx and N_id_1 + * Return 1 if the sequence was found, 0 if the peak is not found, -1 if the subframe_sz or symbol_sz are + * invalid or not configured. 
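+ *
+ * Illustrative call sequence (sketch; the threshold, subframe size and
+ * symbol size values are application-dependent):
+ *
+ *   sss_synch_t sss;
+ *   sss_synch_init(&sss);
+ *   sss_synch_set_N_id_2(&sss, N_id_2);
+ *   sss_synch_set_threshold(&sss, threshold);
+ *   sss_synch_set_subframe_sz(&sss, subframe_sz);
+ *   sss_synch_set_symbol_sz(&sss, symbol_sz);
+ *   if (sss_synch_frame(&sss, input, &subframe_idx, &N_id_1) == 1) {
+ *     // cell id is 3 * N_id_1 + N_id_2
+ *   }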
+ * Before calling this function, the correlation threshold and symbol size duration need to be set + * using sss_synch_set_threshold() and sss_synch_set_symbol_sz(). + */ +int sss_synch_frame(sss_synch_t *q, cf_t *input, int *subframe_idx, + int *N_id_1) { + int m0,m1; + float m0_value, m1_value; + + if (q->subframe_sz <= 0 || q->symbol_sz <= 0) { + return -1; + } + + sss_synch_m0m1(q, &input[SSS_SYMBOL_ST(q->subframe_sz, q->symbol_sz)], + &m0, &m0_value, &m1, &m1_value); + + if (m0_value > q->corr_peak_threshold && m1_value > q->corr_peak_threshold) { + if (subframe_idx) { + *subframe_idx = sss_synch_subframe(m0, m1); + } + if (N_id_1) { + *N_id_1 = sss_synch_N_id_1(q, m0, m1); + } + return 1; + } else { + return 0; + } +} + +/** Used by sss_synch_frame() to compute the beginning of the SSS symbol + * symbol_sz MUST INCLUDE THE CYCLIC PREFIX SIZE + */ +void sss_synch_set_symbol_sz(sss_synch_t *q, int symbol_sz) { + q->symbol_sz = symbol_sz; +} + +/** Used by sss_synch_frame() to compute the beginning of the SSS symbol */ +void sss_synch_set_subframe_sz(sss_synch_t *q, int subframe_sz) { + q->subframe_sz = subframe_sz; +} + + +/** Sets the SSS correlation peak detection threshold */ +void sss_synch_set_threshold(sss_synch_t *q, float threshold) { + q->corr_peak_threshold = threshold; +} + +/** Returns the subframe index based on the m0 and m1 values */ +int sss_synch_subframe(int m0, int m1) { + if (m1 > m0) { + return 0; + } else { + return 5; + } +} + +/** Returns the N_id_1 value based on the m0 and m1 values */ +int sss_synch_N_id_1(sss_synch_t *q, int m0, int m1) { + if (m0<0 || m0>29 || m1<0 || m1>29) { + return -1; + } + if (m1 > m0) { + return q->N_id_1_table[m0][m1-1]; + } else { + return q->N_id_1_table[m1][m0-1]; + } +} + +/** High-level API */ + +int sss_synch_initialize(sss_synch_hl* h) { + + if (sss_synch_init(&h->obj)) { + return -1; + } + sss_synch_set_N_id_2(&h->obj, h->init.N_id_2); + + return 0; +} + +int sss_synch_work(sss_synch_hl* hl) { + + if (hl->ctrl_in.correlation_threshold) { + sss_synch_set_threshold(&hl->obj, hl->ctrl_in.correlation_threshold); + } + if (hl->ctrl_in.subframe_sz) { + sss_synch_set_subframe_sz(&hl->obj, hl->ctrl_in.subframe_sz); + } + if (hl->ctrl_in.symbol_sz) { + sss_synch_set_symbol_sz(&hl->obj, hl->ctrl_in.symbol_sz); + } + sss_synch_frame(&hl->obj, hl->input, &hl->ctrl_out.subframe_idx, &hl->ctrl_out.N_id_1); + + return 0; +} + +int sss_synch_stop(sss_synch_hl* hl) { + sss_synch_free(&hl->obj); + return 0; +} + diff --git a/lib/sync/src/sync.c b/lib/sync/src/sync.c new file mode 100644 index 000000000..0e0a5be28 --- /dev/null +++ b/lib/sync/src/sync.c @@ -0,0 +1,180 @@ +/* + * Copyright (c) 2013, Ismael Gomez-Miguelez . + * This file is part of OSLD-lib (http://https://github.com/ismagom/osld-lib) + * + * OSLD-lib is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * OSLD-lib is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with OSLD-lib. If not, see . 
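+ *
+ * The sync object below wraps one PSS and one SSS detector per N_id_2.
+ * sync_run() correlates the input against the three PSS sequences (or only
+ * the forced one), validates the strongest peak against the configured
+ * threshold (absolute value or peak-to-average mode), and, if accepted,
+ * estimates the CFO and runs the SSS m0/m1 detection to recover N_id_1 and
+ * the slot index, from which the cell id is 3*N_id_1 + N_id_2.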
+ */ + + +#include + +#include "utils/debug.h" +#include "lte/base.h" +#include "sync/sync.h" + +int sync_init(sync_t *q) { + int N_id_2; + + bzero(q, sizeof(sync_t)); + q->force_N_id_2 = -1; + q->threshold = 1.5; + q->pss_mode = PEAK_MEAN; + + for (N_id_2=0;N_id_2<3;N_id_2++) { + if (pss_synch_init(&q->pss[N_id_2], 960)) { + fprintf(stderr, "Error initializing PSS object\n"); + return -1; + } + if (pss_synch_set_N_id_2(&q->pss[N_id_2], N_id_2)) { + fprintf(stderr, "Error initializing N_id_2\n"); + return -1; + } + if (sss_synch_init(&q->sss[N_id_2])) { + fprintf(stderr, "Error initializing SSS object\n"); + return -1; + } + if (sss_synch_set_N_id_2(&q->sss[N_id_2], N_id_2)) { + fprintf(stderr, "Error initializing N_id_2\n"); + return -1; + } + DEBUG("PSS and SSS initiated N_id_2=%d\n", N_id_2); + } + + return 0; +} + +void sync_free(sync_t *q) { + int N_id_2; + + for (N_id_2=0;N_id_2<3;N_id_2++) { + pss_synch_free(&q->pss[N_id_2]); + sss_synch_free(&q->sss[N_id_2]); + } +} + +void sync_pss_det_absolute(sync_t *q) { + q->pss_mode = ABSOLUTE; +} +void sync_pss_det_peakmean(sync_t *q) { + q->pss_mode = PEAK_MEAN; +} + +void sync_set_threshold(sync_t *q, float threshold) { + q->threshold = threshold; +} + +void sync_force_N_id_2(sync_t *q, int force_N_id_2) { + q->force_N_id_2 = force_N_id_2; +} + +int sync_get_cell_id(sync_t *q) { + if (q->N_id_1 >=0 && q->N_id_2 >= 0) { + return q->N_id_1*3 + q->N_id_2; + } else { + return -1; + } +} + +int sync_get_N_id_1(sync_t *q) { + return q->N_id_1; +} + +int sync_get_N_id_2(sync_t *q) { + return q->N_id_2; +} + +int sync_get_slot_id(sync_t *q) { + return q->slot_id; +} + +float sync_get_cfo(sync_t *q) { + return q->cfo; +} + +float sync_get_peak_to_avg(sync_t *q) { + return q->peak_to_avg; +} + +int sync_run(sync_t *q, cf_t *input, int read_offset) { + int N_id_2, peak_pos[3], sss_idx; + int m0, m1; + float m0_value, m1_value; + float peak_value[3]; + float mean_value[3]; + float max=-999; + int i; + int peak_detected; + + if (q->force_N_id_2 == -1) { + for (N_id_2=0;N_id_2<3;N_id_2++) { + peak_pos[N_id_2] = pss_synch_find_pss(&q->pss[N_id_2], &input[read_offset], + &peak_value[N_id_2], &mean_value[N_id_2]); + } + for (i=0;i<3;i++) { + if (peak_value[i] > max) { + max = peak_value[i]; + N_id_2 = i; + } + } + } else { + N_id_2 = q->force_N_id_2; + peak_pos[N_id_2] = pss_synch_find_pss(&q->pss[N_id_2], &input[read_offset], + &peak_value[N_id_2], &mean_value[N_id_2]); + } + + DEBUG("PSS possible peak N_id_2=%d, pos=%d value=%.2f threshold=%.2f\n", + N_id_2, peak_pos[N_id_2], peak_value[N_id_2], q->threshold); + + q->peak_to_avg = peak_value[N_id_2] / mean_value[N_id_2]; + + /* If peak detected */ + peak_detected = 0; + if (peak_pos[N_id_2] > 128) { + if (q->pss_mode == ABSOLUTE) { + if (peak_value[N_id_2] > q->threshold) { + peak_detected = 1; + } + } else { + if (q->peak_to_avg > q->threshold) { + peak_detected = 1; + } + } + } + if (peak_detected) { + + INFO("PSS peak detected N_id_2=%d, pos=%d value=%.2f\n", N_id_2, peak_pos[N_id_2], peak_value[N_id_2]); + + q->cfo = pss_synch_cfo_compute(&q->pss[N_id_2], &input[read_offset + peak_pos[N_id_2]-128]); + INFO("Estimated CFO=%.4f\n", q->cfo); + + sss_idx = read_offset + peak_pos[N_id_2]-2*(128+CP(128,CPNORM_LEN)); + if (sss_idx>= 0) { + sss_synch_m0m1(&q->sss[N_id_2], &input[sss_idx], + &m0, &m0_value, &m1, &m1_value); + + q->N_id_2 = N_id_2; + q->slot_id = 2 * sss_synch_subframe(m0, m1); + q->N_id_1 = sss_synch_N_id_1(&q->sss[N_id_2], m0, m1); + + INFO("SSS detected N_id_1=%d, slot_idx=%d, m0=%d, 
m1=%d\n", + q->N_id_1, q->slot_id, m0, m1); + + return peak_pos[N_id_2]; + } else { + return -1; + } + } else { + return -1; + } +} diff --git a/lib/utils/src/bit.c b/lib/utils/src/bit.c new file mode 100644 index 000000000..fda92913e --- /dev/null +++ b/lib/utils/src/bit.c @@ -0,0 +1,62 @@ +/* + * Copyright (c) 2013, Ismael Gomez-Miguelez + * This file is part of OSLD-lib (http://https://github.com/ismagom/osld-lib) + * + * OSLD-lib is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * OSLD-lib is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with OSLD-lib. If not, see . + */ + +#include +#include + +void bit_pack(uint32_t value, char **bits, int nof_bits) +{ + int i; + + for(i=0; i> (nof_bits-i-1)) & 0x1; + } + *bits += nof_bits; +} + +uint32_t bit_unpack(char **bits, int nof_bits) +{ + int i; + unsigned int value=0; + + for(i=0; i. + * This file is part of OSLD-lib (http://https://github.com/ismagom/osld-lib) + * + * OSLD-lib is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * OSLD-lib is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with OSLD-lib. If not, see . 
+ */ + +#include +#include + +#include "utils/dft.h" +#include "utils/vector.h" +#include "utils/convolution.h" + + +int conv_fft_cc_init(conv_fft_cc_t *state, int input_len, int filter_len) { + state->input_len = input_len; + state->filter_len = filter_len; + state->output_len = input_len+filter_len-1; + state->input_fft = vec_malloc(sizeof(_Complex float)*state->output_len); + state->filter_fft = vec_malloc(sizeof(_Complex float)*state->output_len); + state->output_fft = vec_malloc(sizeof(_Complex float)*state->output_len); + if (!state->input_fft || !state->filter_fft || !state->output_fft) { + return -1; + } + if (dft_plan(state->output_len,COMPLEX_2_COMPLEX,FORWARD,&state->input_plan)) { + return -2; + } + if (dft_plan(state->output_len,COMPLEX_2_COMPLEX,FORWARD,&state->filter_plan)) { + return -3; + } + if (dft_plan(state->output_len,COMPLEX_2_COMPLEX,BACKWARD,&state->output_plan)) { + return -4; + } + return 0; +} + +void conv_fft_cc_free(conv_fft_cc_t *state) { + if (state->input_fft) { + free(state->input_fft); + } + if (state->filter_fft) { + free(state->filter_fft); + } + if (state->output_fft) { + free(state->output_fft); + } + dft_plan_free(&state->input_plan); + dft_plan_free(&state->filter_plan); + dft_plan_free(&state->output_plan); +} + +int conv_fft_cc_run(conv_fft_cc_t *state, _Complex float *input, _Complex float *filter, _Complex float *output) { + + dft_run_c2c(&state->input_plan, input, state->input_fft); + dft_run_c2c(&state->filter_plan, filter, state->filter_fft); + + vec_dot_prod(state->input_fft,state->filter_fft,state->output_fft,state->output_len); + + dft_run_c2c(&state->output_plan, state->output_fft, output); + + return state->output_len; + +} + +int conv_cc(_Complex float *input, _Complex float *filter, _Complex float *output, int input_len, int filter_len) { + int i,j; + int output_len; + output_len=input_len+filter_len-1; + memset(output,0,output_len*sizeof(_Complex float)); + for (i=0;i. + * This file is part of OSLD-lib (http://https://github.com/ismagom/osld-lib) + * + * OSLD-lib is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * OSLD-lib is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with OSLD-lib. If not, see . + */ + +#include +#include +#include +#include + +#include "utils/dft.h" + +#define div(a,b) ((a-1)/b+1) + + +int dft_plan_multi(const int *dft_points, dft_mode_t *modes, dft_dir_t *dirs, + int nof_plans, dft_plan_t *plans) { + int i; + for (i=0;iin = fftwf_malloc(size_in*len); + plan->out = fftwf_malloc(size_out*len); +} + +int dft_plan_c2c(const int dft_points, dft_dir_t dir, dft_plan_t *plan) { + int sign; + sign = (dir == FORWARD) ? FFTW_FORWARD : FFTW_BACKWARD; + allocate(plan,sizeof(fftwf_complex),sizeof(fftwf_complex), dft_points); + + plan->p = fftwf_plan_dft_1d(dft_points, plan->in, plan->out, sign, 0U); + if (!plan->p) { + return -1; + } + + plan->size = dft_points; + plan->mode = COMPLEX_2_COMPLEX; + + return 0; +} + +int dft_plan_r2r(const int dft_points, dft_dir_t dir, dft_plan_t *plan) { + int sign; + sign = (dir == FORWARD) ? 
FFTW_R2HC : FFTW_HC2R; + + allocate(plan,sizeof(float),sizeof(float), dft_points); + + plan->p = fftwf_plan_r2r_1d(dft_points, plan->in, plan->out, sign, 0U); + if (!plan->p) { + return -1; + } + + plan->size = dft_points; + plan->mode = REAL_2_REAL; + + return 0; +} + +int dft_plan_c2r(const int dft_points, dft_dir_t dir, dft_plan_t *plan) { + if (dft_plan_c2c(dft_points, dir, plan)) { + return -1; + } + plan->mode = COMPLEX_2_REAL; + return 0; +} + +static void copy(char *dst, char *src, int size_d, int len, int mirror, int dc_offset) { + int offset=dc_offset?1:0; + int hlen; + if (mirror == DFT_MIRROR_PRE) { + hlen = div(len,2); + memset(dst,0,size_d*offset); + memcpy(&dst[offset*size_d], &src[size_d*hlen], size_d*(hlen-offset)); + memcpy(&dst[hlen*size_d], src, size_d*(len - hlen)); + } else if (mirror == DFT_MIRROR_POS) { + hlen = div(len,2); + memcpy(dst, &src[size_d*hlen], size_d*hlen); + memcpy(&dst[hlen*size_d], &src[size_d*offset], size_d*(len - hlen)); + } else { + memcpy(dst,src,size_d*len); + } +} + +void dft_run(dft_plan_t *plan, void *in, void *out) { + switch(plan->mode) { + case COMPLEX_2_COMPLEX: + dft_run_c2c(plan,in,out); + break; + case REAL_2_REAL: + dft_run_r2r(plan,in,out); + break; + case COMPLEX_2_REAL: + dft_run_c2r(plan,in,out); + break; + } +} + +void dft_run_c2c(dft_plan_t *plan, dft_c_t *in, dft_c_t *out) { + float norm; + int i; + fftwf_complex *f_out = plan->out; + + copy((char*) plan->in,(char*) in,sizeof(dft_c_t),plan->size,plan->options & DFT_MIRROR_PRE, + plan->options & DFT_DC_OFFSET); + + fftwf_execute(plan->p); + + if (plan->options & DFT_NORMALIZE) { + norm = sqrtf(plan->size); + for (i=0;isize;i++) { + f_out[i] /= norm; + } + } + if (plan->options & DFT_OUT_DB) { + for (i=0;isize;i++) { + f_out[i] = 10*log10(f_out[i]); + } + } + copy((char*) out,(char*) plan->out,sizeof(dft_c_t),plan->size,plan->options & DFT_MIRROR_POS, + plan->options & DFT_DC_OFFSET); +} + +void dft_run_r2r(dft_plan_t *plan, dft_r_t *in, dft_r_t *out) { + float norm; + int i; + int len = plan->size; + float *f_out = plan->out; + + copy((char*) plan->in,(char*) in,sizeof(dft_r_t),plan->size,plan->options & DFT_MIRROR_PRE, + plan->options & DFT_DC_OFFSET); + + fftwf_execute(plan->p); + + if (plan->options & DFT_NORMALIZE) { + norm = plan->size; + for (i=0;ioptions & DFT_PSD) { + for (i=0;i<(len+1)/2-1;i++) { + out[i] = sqrtf(f_out[i]*f_out[i]+f_out[len-i-1]*f_out[len-i-1]); + } + } + if (plan->options & DFT_OUT_DB) { + for (i=0;iout; + + copy((char*) plan->in,(char*) in,sizeof(dft_r_t),plan->size,plan->options & DFT_MIRROR_PRE, + plan->options & DFT_DC_OFFSET); + + fftwf_execute(plan->p); + + if (plan->options & DFT_NORMALIZE) { + norm = plan->size; + for (i=0;isize;i++) { + f_out[i] /= norm; + } + } + if (plan->options & DFT_PSD) { + for (i=0;isize;i++) { + out[i] = (__real__ f_out[i])*(__real__ f_out[i])+ + (__imag__ f_out[i])*(__imag__ f_out[i]); + if (!(plan->options & DFT_OUT_DB)) { + out[i] = sqrtf(out[i]); + } + } + } + if (plan->options & DFT_OUT_DB) { + for (i=0;isize;i++) { + out[i] = 10*log10(out[i]); + } + } +} + + +void dft_plan_free(dft_plan_t *plan) { + if (!plan) return; + if (!plan->size) return; + if (plan->in) fftwf_free(plan->in); + if (plan->out) fftwf_free(plan->out); + if (plan->p) fftwf_destroy_plan(plan->p); + bzero(plan, sizeof(dft_plan_t)); +} + +void dft_plan_free_vector(dft_plan_t *plan, int nof_plans) { + int i; + for (i=0;i. 
+ * This file is part of OSLD-lib (http://https://github.com/ismagom/osld-lib) + * + * OSLD-lib is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * OSLD-lib is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with OSLD-lib. If not, see . + */ + + +#include +#include +#include +#include + +#include "utils/matrix.h" + +int matrix_init(void ***q, int sz_x, int sz_y, int elem_sz) { + int i; + int ret = -1; + *q = malloc(sizeof(void*) * sz_x); + if (!*q) { + goto free_and_exit; + } + for (i=0;i. + * This file is part of OSLD-lib (http://https://github.com/ismagom/osld-lib) + * + * OSLD-lib is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * OSLD-lib is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with OSLD-lib. If not, see . + */ + + + +#include + +/** + * Multiplexes a signal from nof_inputs interfaces. + * Copies output_lengths[i] samples to the i-th interfaces, ignores output_padding_pre[i] samples + * from the beginning each input interface. + */ +void mux(void **input, void *output, int *input_lengths, int *input_padding_pre, int nof_inputs, + int sample_sz) { + int i,r; + char *out = (char*) output; + char **in = (char**) input; + + r=0; + for (i=0;i. + * This file is part of OSLD-lib (http://https://github.com/ismagom/osld-lib) + * + * OSLD-lib is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * OSLD-lib is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with OSLD-lib. If not, see . 
+ */ + + + +#include +#include +#include +#include +#include + +#include "utils/nco.h" + +void nco_init(nco_t *nco, int size) { + int i; + + nco->size=size; + nco->cost=malloc(size*sizeof(float)); + nco->sint=malloc(size*sizeof(float)); + assert(nco->cost && nco->sint); + + for (i=0;icost[i] = cosf(2*M_PI*i/size); + nco->sint[i] = sinf(2*M_PI*i/size); + } +} + +void nco_destroy(nco_t *nco) { + if (nco->cost) { + free(nco->cost); + } + if (nco->sint) { + free(nco->sint); + } + nco->size=0; + bzero(nco, sizeof(nco_t)); +} + +unsigned int nco_idx(float phase, int size) { + while(phase>=2*M_PI) { + phase-=2*M_PI; + } + unsigned int idx = (unsigned int) (phase*size/(2*M_PI)); + return idx; +} + +inline float nco_sin(nco_t *nco, float phase) { + return nco->sint[nco_idx(phase,nco->size)]; +} +inline float nco_cos(nco_t *nco, float phase) { + return nco->cost[nco_idx(phase,nco->size)]; +} +inline void nco_sincos(nco_t *nco, float phase, float *sin, float *cos) { + unsigned int idx = nco_idx(phase,nco->size); + *sin = nco->sint[idx]; + *cos = nco->cost[idx]; +} + +inline _Complex float nco_cexp(nco_t *nco, float arg) { + float s,c; + nco_sincos(nco,arg,&s,&c); + return c+I*s; +} + +void nco_sin_f(nco_t *nco, float *x, float freq, int len) { + int i; + unsigned int idx; + + idx=0; + for (i=0;isize/len))%nco->size; + x[i] = nco->sint[idx]; + } +} + + +void nco_cos_f(nco_t *nco, float *x, float freq, int len) { + int i; + unsigned int idx; + + idx=0; + for (i=0;isize/len))%nco->size; + x[i] = nco->cost[idx]; + } +} + + +void nco_cexp_f(nco_t *nco, _Complex float *x, float freq, int len) { + int i; + unsigned int idx; + + idx=0; + for (i=0;isize/len))%nco->size; + x[i] = nco->cost[idx] + I*nco->sint[idx]; + } +} + +void nco_cexp_f_direct(_Complex float *x, float freq, int len) { + int i; + for (i=0;i. + * This file is part of OSLD-lib (http://https://github.com/ismagom/osld-lib) + * + * OSLD-lib is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * OSLD-lib is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with OSLD-lib. If not, see . + */ + + +void pack_bits(unsigned int value, char **bits, int nof_bits) +{ + int i; + + for(i=0; i> (nof_bits-i-1)) & 0x1; + } + *bits += nof_bits; +} + +unsigned int unpack_bits(char **bits, int nof_bits) +{ + int i; + unsigned int value=0; + + for(i=0; i. + * This file is part of OSLD-lib (http://https://github.com/ismagom/osld-lib) + * + * OSLD-lib is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * OSLD-lib is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with OSLD-lib. If not, see . 
+ */ + +#include "utils/vector.h" +#include +#include +#include + +//#define HAVE_VOLK + +#ifdef HAVE_VOLK +#include "volk/volk.h" +#endif + +int sum_i(int *x, int len) { + int i; + int y=0; + for (i=0;im) { + m=x[i]; + p=i; + } + } + if (pos) *pos=p; + if (max) *max=m; +#else + unsigned int target=0; + volk_32f_index_max_16u_a(&target,x,(unsigned int) len); + if (pos) *pos=(int) target; + if (max) *max=x[target]; +#endif +} + + diff --git a/matlab/chest/get_ce.m b/matlab/chest/get_ce.m new file mode 100644 index 000000000..48221c75b --- /dev/null +++ b/matlab/chest/get_ce.m @@ -0,0 +1,230 @@ +function [ symb, ce ] = get_ce( samps, N_sf, N_id_cell, N_ant ) + + N_sc_rb = 12; % Only dealing with normal cp at this time + N_rb_dl_max = 110; + v_shift = mod(N_id_cell, 6); + sf_start_idx = f_start_idx + N_sf*30720; + crs0 = lte_generate_crs(mod(N_sf*2+0, 20), 0, N_id_cell); + crs1 = lte_generate_crs(mod(N_sf*2+0, 20), 1, N_id_cell); + crs4 = lte_generate_crs(mod(N_sf*2+0, 20), 4, N_id_cell); + crs7 = lte_generate_crs(mod(N_sf*2+1, 20), 0, N_id_cell); + crs8 = lte_generate_crs(mod(N_sf*2+1, 20), 1, N_id_cell); + crs11 = lte_generate_crs(mod(N_sf*2+1, 20), 4, N_id_cell); + crs14 = lte_generate_crs(mod(N_sf*2+2, 20), 0, N_id_cell); + crs15 = lte_generate_crs(mod(N_sf*2+2, 20), 1, N_id_cell); + + N_rb_dl = 6; + FFT_pad_size = 988; % FFT_size = 2048 + + for(n=0:15) + if(n < 7) + idx = sf_start_idx; + elseif(n < 14) + idx = sf_start_idx + 15360; + else + idx = sf_start_idx + 2*15360; + end + symb(n+1,:) = samps_to_symbs(samps, idx, mod(n,7), FFT_pad_size, 0); + end + + for(p=0:N_ant-1) + % Define v, crs, sym, and N_sym + if(p == 0) + v = [0, 3, 0, 3, 0]; + crs = [crs0; crs4; crs7; crs11; crs14]; + sym = [symb(0+1,:); symb(4+1,:); symb(7+1,:); symb(11+1,:); symb(14+1,:)]; + N_sym = 5; + elseif(p == 1) + v = [3, 0, 3, 0, 3]; + crs = [crs0; crs4; crs7; crs11; crs14]; + sym = [symb(0+1,:); symb(4+1,:); symb(7+1,:); symb(11+1,:); symb(14+1,:)]; + N_sym = 5; + elseif(p == 2) + v = [0, 3, 0]; + crs = [crs1; crs8; crs15]; + sym = [symb(1+1,:); symb(8+1,:); symb(15+1,:)]; + N_sym = 3; + else % p == 3 + v = [3, 6, 3]; + crs = [crs1; crs8; crs15]; + sym = [symb(1+1,:); symb(8+1,:); symb(15+1,:)]; + N_sym = 3; + end + + for(n=1:N_sym) + for(m=0:2*N_rb_dl-1) + k = 6*m + mod((v(n) + v_shift), 6); + m_prime = m + N_rb_dl_max - N_rb_dl; + tmp = sym(n,k+1)/crs(n,m_prime+1); + mag(n,k+1) = abs(tmp); + ang(n,k+1) = angle(tmp); + + % Unwrap phase + if(m > 0) + while((ang(n,k+1) - ang(n,k-6+1)) > pi) + ang(n,k+1) = ang(n,k+1) - 2*pi; + end + while((ang(n,k+1) - ang(n,k-6+1)) < -pi) + ang(n,k+1) = ang(n,k+1) + 2*pi; + end + end + + % Interpolate between CRSs (simple linear interpolation) + if(m > 0) + frac_mag = (mag(n,k+1) - mag(n,k-6+1))/6; + frac_ang = (ang(n,k+1) - ang(n,k-6+1))/6; + for(o=1:5) + mag(n,k-o+1) = mag(n,k-(o-1)+1) - frac_mag; + ang(n,k-o+1) = ang(n,k-(o-1)+1) - frac_ang; + end + end + + % Interpolate before 1st CRS + if(m == 1) + for(o=1:mod(v(n) + v_shift, 6)) + mag(n,k-6-o+1) = mag(n,k-6-(o-1)+1) - frac_mag; + ang(n,k-6-o+1) = ang(n,k-6-(o-1)+1) - frac_ang; + end + end + end + + % Interpolate after last CRS + for(o=1:(5-mod(v(n) + v_shift, 6))) + mag(n,k+o+1) = mag(n,k+(o-1)+1) - frac_mag; + ang(n,k+o+1) = ang(n,k+(o-1)+1) - frac_ang; + end + end + + % Interpolate between symbols and construct channel estimates + if(N_sym == 3) + for(n=1:N_sc_rb*N_rb_dl) + % Construct symbol 1 and 8 channel estimates directly + ce(p+1,1+1,n) = mag(1,n)*(cos(ang(1,n)) + j*sin(ang(1,n))); + ce(p+1,8+1,n) = 
mag(2,n)*(cos(ang(2,n)) + j*sin(ang(2,n))); + + % Interpolate for symbol 2, 3, 4, 5, 6, and 7 channel estimates + frac_mag = (mag(2,n) - mag(1,n))/7; + frac_ang = ang(2,n) - ang(1,n); + if(frac_ang >= pi) % Wrap angle + frac_ang = frac_ang - 2*pi; + elseif(frac_ang <= -pi) + frac_ang = frac_ang + 2*pi; + end + frac_ang = frac_ang/7; + ce_mag = mag(2,n); + ce_ang = ang(2,n); + for(o=7:-1:2) + ce_mag = ce_mag - frac_mag; + ce_ang = ce_ang - frac_ang; + ce(p+1,o+1,n) = ce_mag*(cos(ce_ang) + j*sin(ce_ang)); + end + + % Interpolate for symbol 0 channel estimate + % FIXME: Use previous slot to do this correctly + ce_mag = mag(1,n) - frac_mag; + ce_ang = ang(1,n) - frac_ang; + ce(p+1,0+1,n) = ce_mag*(cos(ce_ang) + j*sin(ce_ang)); + + % Interpolate for symbol 9, 10, 11, 12, and 13 channel estimates + frac_mag = (mag(3,n) - mag(2,n))/7; + frac_ang = ang(3,n) - ang(2,n); + if(frac_ang >= pi) % Wrap angle + frac_ang = frac_ang - 2*pi; + elseif(frac_ang <= -pi) + frac_ang = frac_ang + 2*pi; + end + frac_ang = frac_ang/7; + ce_mag = mag(3,n) - frac_mag; + ce_ang = ang(3,n) - frac_ang; + for(o=13:-1:9) + ce_mag = ce_mag - frac_mag; + ce_ang = ce_ang - frac_ang; + ce(p+1,o+1,n) = ce_mag*(cos(ce_ang) + j*sin(ce_ang)); + end + end + else + for(n=1:N_sc_rb*N_rb_dl) + % Construct symbol 0, 4, 7, and 11 channel estimates directly + ce(p+1,0+1,n) = mag(1,n)*(cos(ang(1,n)) + j*sin(ang(1,n))); + ce(p+1,4+1,n) = mag(2,n)*(cos(ang(2,n)) + j*sin(ang(2,n))); + ce(p+1,7+1,n) = mag(3,n)*(cos(ang(3,n)) + j*sin(ang(3,n))); + ce(p+1,11+1,n) = mag(4,n)*(cos(ang(4,n)) + j*sin(ang(4,n))); + + % Interpolate for symbol 1, 2, and 3 channel estimates + frac_mag = (mag(2,n) - mag(1,n))/4; + frac_ang = ang(2,n) - ang(1,n); + if(frac_ang >= pi) % Wrap angle + frac_ang = frac_ang - 2*pi; + elseif(frac_ang <= -pi) + frac_ang = frac_ang + 2*pi; + end + frac_ang = frac_ang/4; + ce_mag = mag(2,n); + ce_ang = ang(2,n); + for(o=3:-1:1) + ce_mag = ce_mag - frac_mag; + ce_ang = ce_ang - frac_ang; + ce(p+1,o+1,n) = ce_mag*(cos(ce_ang) + j*sin(ce_ang)); + end + + % Interpolate for symbol 5 and 6 channel estimates + frac_mag = (mag(3,n) - mag(2,n))/3; + frac_ang = ang(3,n) - ang(2,n); + if(frac_ang >= pi) % Wrap angle + frac_ang = frac_ang - 2*pi; + elseif(frac_ang <= -pi) + frac_ang = frac_ang + 2*pi; + end + frac_ang = frac_ang/3; + ce_mag = mag(3,n); + ce_ang = ang(3,n); + for(o=6:-1:5) + ce_mag = ce_mag - frac_mag; + ce_ang = ce_ang - frac_ang; + ce(p+1,o+1,n) = ce_mag*(cos(ce_ang) + j*sin(ce_ang)); + end + + % Interpolate for symbol 8, 9, and 10 channel estimates + frac_mag = (mag(4,n) - mag(3,n))/4; + frac_ang = ang(4,n) - ang(3,n); + if(frac_ang >= pi) % Wrap angle + frac_ang = frac_ang - 2*pi; + elseif(frac_ang <= -pi) + frac_ang = frac_ang + 2*pi; + end + frac_ang = frac_ang/4; + ce_mag = mag(4,n); + ce_ang = ang(4,n); + for(o=10:-1:8) + ce_mag = ce_mag - frac_mag; + ce_ang = ce_ang - frac_ang; + ce(p+1,o+1,n) = ce_mag*(cos(ce_ang) + j*sin(ce_ang)); + end + + % Interpolate for symbol 12 and 13 channel estimates + frac_mag = (mag(5,n) - mag(4,n))/3; + frac_ang = ang(5,n) - ang(4,n); + if(frac_ang >= pi) % Wrap angle + frac_ang = frac_ang - 2*pi; + elseif(frac_ang <= -pi) + frac_ang = frac_ang + 2*pi; + end + + + frac_ang = frac_ang/3; + ce_mag = mag(5,n); + ce_ang = ang(5,n); + for(o=13:-1:12) + ce_mag = ce_mag - frac_mag; + ce_ang = ce_ang - frac_ang; + ce(p+1,o+1,n) = ce_mag*(cos(ce_ang) + j*sin(ce_ang)); + end + end + end + end + subplot(1,2,1) + pcolor(transpose(abs(reshape(ce(1,:,:),14,[])))) + subplot(1,2,2) + 
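+  % Note: the pcolor above shows the interpolated channel-estimate magnitude of
+  % the first antenna port as a (subcarrier x OFDM-symbol) surface, while the
+  % pane selected here shows the real part of the raw demodulated symbols for
+  % visual comparison.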
pcolor(transpose(real(symb(:,:)))) +end + diff --git a/matlab/chest/lte_generate_crs.m b/matlab/chest/lte_generate_crs.m new file mode 100644 index 000000000..371fa6c6d --- /dev/null +++ b/matlab/chest/lte_generate_crs.m @@ -0,0 +1,15 @@ +function [r] = lte_generate_crs(ns, l, cell_id) + + % Calculate c_init and sequence length + N_cp = 1; + c_init = 1024 * (7 * (ns+1) + l + 1) * (2 * cell_id + 1) + 2*cell_id + N_cp; + + % Generate the psuedo random sequence c + c = lte_generate_prs_c(c_init, 220); + + % Construct r + r = zeros(1,len); + for(m=0:len-1) + r(m+1) = (1/sqrt(2))*(1 - 2*c(2*m+1)) + j*(1/sqrt(2))*(1 - 2*c(2*m+1+1)); + end +end diff --git a/matlab/chest/lte_generate_prs_c.m b/matlab/chest/lte_generate_prs_c.m new file mode 100644 index 000000000..e1e192117 --- /dev/null +++ b/matlab/chest/lte_generate_prs_c.m @@ -0,0 +1,23 @@ + +function [c] = lte_generate_prs_c(c_init, seq_len) + % Initialize the m-sequences + x1 = zeros(1,1600+seq_len); + x2 = zeros(1,1600+seq_len); + tmp = c_init; + for(n=0:30) + x2(30-n+1) = floor(tmp/(2^(30-n))); + tmp = tmp - (floor(tmp/(2^(30-n)))*2^(30-n)); + end + x1(0+1) = 1; + + % Advance m-sequences + for(n=0:1600+seq_len) + x1(n+31+1) = mod(x1(n+3+1) + x1(n+1), 2); + x2(n+31+1) = mod(x2(n+3+1) + x2(n+2+1) + x2(n+1+1) + x2(n+1), 2); + end + + % Generate c + for(n=0:seq_len-1) + c(n+1) = mod(x1(n+1600+1) + x2(n+1600+1), 2); + end +end diff --git a/matlab/chest/samps_to_symbs.m b/matlab/chest/samps_to_symbs.m new file mode 100644 index 000000000..f8f1320b1 --- /dev/null +++ b/matlab/chest/samps_to_symbs.m @@ -0,0 +1,26 @@ +function [symbs] = samps_to_symbs(samps, slot_start_idx, symb_offset, FFT_pad_size, scale) + % Calculate index and CP length + if(mod(symb_offset, 7) == 0) + CP_len = 160; + else + CP_len = 144; + end + index = slot_start_idx + (2048+144)*symb_offset; + if(symb_offset > 0) + index = index + 16; + end + + % Take FFT + tmp = fftshift(fft(samps(index+CP_len:index+CP_len+2047))); + + % Remove DC subcarrier + tmp_symbs = [tmp(FFT_pad_size+1:1024); tmp(1026:2048-(FFT_pad_size-1))]; + + if(scale == 0) + symbs = tmp_symbs; + else + for(n=1:length(tmp_symbs)) + symbs(n) = cos(angle(tmp_symbs(n))) + j*sin(angle(tmp_symbs(n))); + end + end +end \ No newline at end of file diff --git a/matlab/sync/cfo_estimate_cp.m b/matlab/sync/cfo_estimate_cp.m new file mode 100644 index 000000000..e04960dad --- /dev/null +++ b/matlab/sync/cfo_estimate_cp.m @@ -0,0 +1,12 @@ +function cfo = cfo_estimate_cp(input, Nsyms, sym_len, cp0_len, cp_len) + +r=zeros(Nsyms, 1); +r(1)=sum(input(1:cp0_len).*conj(input(1+sym_len:cp0_len+sym_len))); +s=cp0_len+sym_len+1; +e=cp0_len+sym_len+cp_len; +for i=2:Nsyms + r(i)=sum(input(s:e).*conj(input(s+sym_len:e+sym_len))); + s=s+sym_len+cp_len; + e=e+sym_len+cp_len; +end +cfo=-angle(mean(r))/2/pi; \ No newline at end of file diff --git a/matlab/sync/cfo_test.m b/matlab/sync/cfo_test.m new file mode 100644 index 000000000..e3bf39c51 --- /dev/null +++ b/matlab/sync/cfo_test.m @@ -0,0 +1,27 @@ +%clear; +M=1000; +sym_len=128; +x=lte(1:M*15360*sym_len/2048*2000/1536); +%x=read_complex('../../../eclipse_osldlib/test.dat'); +%y=resample(x,99839996,100000000); + +input=resample(x,1536,2000); +%input=x; +%input=y(1:M*15360*sym_len/2048); +%input=resample(x,3840000,1920000); + +cp0_len=160*sym_len/2048; +cp1_len=144*sym_len/2048; + +slots=reshape(input,15360*sym_len/2048,[]); +[n m]=size(slots); + +cfo=zeros(m,1); +output=zeros(size(input)); +for i=1:m + cfo(i)=cfo_estimate(slots(:,i),7,sym_len,cp1_len,cp1_len); + t=(i-1)*n+1:i*n; + 
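+    % Note: cfo_estimate here presumably refers to the CP-based estimator in
+    % cfo_estimate_cp.m above (the argument list matches). It returns the
+    % fractional CFO normalised to the subcarrier spacing,
+    %   cfo = -angle(mean(r))/(2*pi),
+    % so the correction below (commented out) de-rotates every sample by
+    % exp(-1i*2*pi*cfo(i)*t/sym_len) to remove the estimated offset.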
%output(t)=input(t).*exp(-1i*2*pi*cfo(i)*t/sym_len); +end + +plot(cfo) diff --git a/matlab/sync/check_pss.m b/matlab/sync/check_pss.m new file mode 100644 index 000000000..bcc595c33 --- /dev/null +++ b/matlab/sync/check_pss.m @@ -0,0 +1,38 @@ +function [ fs ] = check_pss( x, N_id_2) +%CHECK_PSS Summary of this function goes here +% Detailed explanation goes here +flen=9600; +n=length(x); +nf=floor(n/flen); + +xf=reshape(x(1:nf*flen),flen,[]); + +fs=zeros(nf,1); +cfo=zeros(nf,1); +cfo2=zeros(nf,1); +m_p=zeros(nf,1); +for i=1:nf-1 + [fs(i) cfo(i) m_p(i)]=find_pss(xf(:,i),N_id_2,false); + if (fs(i)<0) + j=0; + end +% cfo2(i) = cfo_estimate_cp(xf(fs(i)+960:fs(i)+2*960,i),7,128,10,9); +end + +sfo=sfo_estimate(fs, 5/1000); + +subplot(1,3,1) +plot(1:nf,fs) +legend('PSS-based'); +subplot(1,3,2) +plot(1:nf, cfo, 1:nf, cfo2) +legend('PSS-based','CP-based'); +if (nf > 0) + axis([0 nf -0.5 0.5]) +end +subplot(1,3,3) +plot(m_p) +fprintf('pss_mean=%g, pss_var=%g, cp_mean=%g, cp_var=%g m_p=%g sfo=%g Hz\n',mean(cfo),var(cfo), mean(cfo2), var(cfo2), mean(m_p), sfo) +end + + diff --git a/matlab/sync/compute_m0.m b/matlab/sync/compute_m0.m new file mode 100644 index 000000000..f109bd1de --- /dev/null +++ b/matlab/sync/compute_m0.m @@ -0,0 +1,55 @@ +function [ s0_m0 z1_m0 ] = compute_m0( m0) +%COMPUTE_S Summary of this function goes here +% Detailed explanation goes here + + % Generate s_tilda + x_s_tilda(0+1) = 0; + x_s_tilda(1+1) = 0; + x_s_tilda(2+1) = 0; + x_s_tilda(3+1) = 0; + x_s_tilda(4+1) = 1; + for(i_hat=0:25) + x_s_tilda(i_hat+5+1) = mod((x_s_tilda(i_hat+2+1) + x_s_tilda(i_hat+1)), 2); + end + for(idx=0:30) + s_tilda(idx+1) = 1 - 2*x_s_tilda(idx+1); + end + + % Generate c_tilda + x_c_tilda(0+1) = 0; + x_c_tilda(1+1) = 0; + x_c_tilda(2+1) = 0; + x_c_tilda(3+1) = 0; + x_c_tilda(4+1) = 1; + for(i_hat=0:25) + x_c_tilda(i_hat+5+1) = mod((x_c_tilda(i_hat+3+1) + x_c_tilda(i_hat+1)), 2); + end + for(idx=0:30) + c_tilda(idx+1) = 1 - 2*x_c_tilda(idx+1); + end + + % Generate z_tilda + x_z_tilda(0+1) = 0; + x_z_tilda(1+1) = 0; + x_z_tilda(2+1) = 0; + x_z_tilda(3+1) = 0; + x_z_tilda(4+1) = 1; + for(i_hat=0:25) + x_z_tilda(i_hat+5+1) = mod((x_z_tilda(i_hat+4+1) + x_z_tilda(i_hat+2+1) + x_z_tilda(i_hat+1+1) + x_z_tilda(i_hat+1)), 2); + end + for(idx=0:30) + z_tilda(idx+1) = 1 - 2*x_z_tilda(idx+1); + end + + % Generate s0_m0 and s1_m1 + for(n=0:30) + s0_m0(n+1) = s_tilda(mod(n + m0, 31)+1); + end + + % Generate z1_m0 and z1_m1 + for(n=0:30) + z1_m0(n+1) = z_tilda(mod(n + mod(m0, 8), 31)+1); + end + +end + diff --git a/matlab/sync/compute_m1.m b/matlab/sync/compute_m1.m new file mode 100644 index 000000000..8fdb4c0e7 --- /dev/null +++ b/matlab/sync/compute_m1.m @@ -0,0 +1,49 @@ +function [ s1_m1 ] = compute_m1( m1, N_id_2) +%COMPUTE_S Summary of this function goes here +% Detailed explanation goes here + + % Generate s_tilda + x_s_tilda(0+1) = 0; + x_s_tilda(1+1) = 0; + x_s_tilda(2+1) = 0; + x_s_tilda(3+1) = 0; + x_s_tilda(4+1) = 1; + for(i_hat=0:25) + x_s_tilda(i_hat+5+1) = mod((x_s_tilda(i_hat+2+1) + x_s_tilda(i_hat+1)), 2); + end + for(idx=0:30) + s_tilda(idx+1) = 1 - 2*x_s_tilda(idx+1); + end + + % Generate c_tilda + x_c_tilda(0+1) = 0; + x_c_tilda(1+1) = 0; + x_c_tilda(2+1) = 0; + x_c_tilda(3+1) = 0; + x_c_tilda(4+1) = 1; + for(i_hat=0:25) + x_c_tilda(i_hat+5+1) = mod((x_c_tilda(i_hat+3+1) + x_c_tilda(i_hat+1)), 2); + end + for(idx=0:30) + c_tilda(idx+1) = 1 - 2*x_c_tilda(idx+1); + end + + % Generate z_tilda + x_z_tilda(0+1) = 0; + x_z_tilda(1+1) = 0; + x_z_tilda(2+1) = 0; + x_z_tilda(3+1) = 0; + x_z_tilda(4+1) 
= 1; + for(i_hat=0:25) + x_z_tilda(i_hat+5+1) = mod((x_z_tilda(i_hat+4+1) + x_z_tilda(i_hat+2+1) + x_z_tilda(i_hat+1+1) + x_z_tilda(i_hat+1)), 2); + end + for(idx=0:30) + z_tilda(idx+1) = 1 - 2*x_z_tilda(idx+1); + end + + % Generate s0_m0 and s1_m1 + for(n=0:30) + s1_m1(n+1) = s_tilda(mod(n + m1, 31)+1); + end +end + diff --git a/matlab/sync/convfft.m b/matlab/sync/convfft.m new file mode 100644 index 000000000..5ccf9a886 --- /dev/null +++ b/matlab/sync/convfft.m @@ -0,0 +1,57 @@ +function [out] = convfft(z1,z2) +%CONVFFT FFT-based convolution and polynomial multiplication. +% C = CONVFFT(A, B) convolves vectors A and B. The resulting +% vector is length LENGTH(A)+LENGTH(B)-1. +% If A and B are vectors of polynomial coefficients, convolving +% them is equivalent to multiplying the two polynomials. +% +% Please contribute if you find this software useful. +% Report bugs to luigi.rosa@tiscali.it +% +%***************************************************************** +% Luigi Rosa +% Via Centrale 27 +% 67042 Civita di Bagno +% L'Aquila --- ITALY +% email luigi.rosa@tiscali.it +% mobile +39 340 3463208 +% http://utenti.lycos.it/matlab +%***************************************************************** +% + + +z1x=size(z1,1); +z1y=size(z1,2); +z2x=size(z2,1); +z2y=size(z2,2); +if (~isa(z1,'double'))||(~isa(z2,'double'))||(ndims(z1)>2)||(ndims(z2)>2) + disp('Error: input vector must be unidimensional double array'); + out=[]; + return; +else + if ((z1x>1)&&(z1y>1)) || ((z2x>1)&&(z2y>1)) + out=[]; + disp('Error: input vectors are double matrices'); + return; + + else + + if (z1x==1)&&(z1y>1) + z1=z1'; + z1x=z1y; + end + + + if (z2x==1)&&(z2y>1) + z2=z2'; + z2x=z2y; + end + + + if (any(any(imag(z1))))||(any(any(imag(z2)))) + out=(ifft(fft(z1,z1x+z2x-1).*fft(z2,z1x+z2x-1))); + else + out=real(ifft(fft(z1,z1x+z2x-1).*fft(z2,z1x+z2x-1))); + end + end +end \ No newline at end of file diff --git a/matlab/sync/correct_cfo.m b/matlab/sync/correct_cfo.m new file mode 100644 index 000000000..effff8dc4 --- /dev/null +++ b/matlab/sync/correct_cfo.m @@ -0,0 +1,14 @@ +function [ y eps] = correct_cfo( x ) + +xf=reshape(x,9600,[]); +yf=zeros(size(xf)); +[m n]=size(xf); + +for i=1:n + [fs eps(i)]=find_pss(xf(:,i),0,false); + yf(:,i)=xf(:,i).*exp(-1i.*(1:length(xf(:,i)))'.*2*pi*eps(i)/128); +end + +y=reshape(yf,1,[]); + +end \ No newline at end of file diff --git a/matlab/sync/cp_corr.m b/matlab/sync/cp_corr.m new file mode 100644 index 000000000..e212fb034 --- /dev/null +++ b/matlab/sync/cp_corr.m @@ -0,0 +1,9 @@ +function [ lambda ] = cp_corr( x, theta, N, L, rho) + +l0=sum(x(theta:theta+L-1).*conj(x(theta+N:theta+L+N-1))); +l1=0; +for i=theta:theta+L-1 + l1=l1+abs(x(i))^2+abs(x(i+N))^2; +end + +lambda=l0;%2*abs(l0)-rho*l1; diff --git a/matlab/sync/cp_pss_comp.m b/matlab/sync/cp_pss_comp.m new file mode 100644 index 000000000..a589e482a --- /dev/null +++ b/matlab/sync/cp_pss_comp.m @@ -0,0 +1,8 @@ +function cp_pss_comp(x,N_id_2) + +[ fs eps p_m w2] = find_pss( x, N_id_2); +lambda=zeros(1,length(x)-138); +for theta=1:length(lambda) + lambda(theta)=cp_corr(x,theta,128,9,0); +end +plot(1:length(w2),abs(w2)/max(abs(w2)),1:length(lambda),abs(lambda)/max(abs(lambda))) \ No newline at end of file diff --git a/matlab/sync/epsilon.m b/matlab/sync/epsilon.m new file mode 100644 index 000000000..4accfcc3a --- /dev/null +++ b/matlab/sync/epsilon.m @@ -0,0 +1,14 @@ +function [ eps fs] = epsilon( x ) + +xf=reshape(x,19200,[]); + +[m n]=size(xf); +eps=zeros(n,1); +fs=zeros(n,1); + +for i=1:n + [fs(i) 
eps(i)]=find_pss(xf(:,i),0,false); +end + + +end \ No newline at end of file diff --git a/matlab/sync/find_coarse_time_and_freq_offset.m b/matlab/sync/find_coarse_time_and_freq_offset.m new file mode 100644 index 000000000..7293c17a9 --- /dev/null +++ b/matlab/sync/find_coarse_time_and_freq_offset.m @@ -0,0 +1,82 @@ +function [coarse_start freq_offset] = find_coarse_time_and_freq_offset(in, N_cp_l_else) + + % Decompose input + in_re = real(in); + in_im = imag(in); + + abs_corr = zeros(1,960); + for(slot=0:10) + for(n=1:40:960) + corr_re = 0; + corr_im = 0; + for(z=1:N_cp_l_else) + index = (slot*960) + n-1 + z; + corr_re = corr_re + in_re(index)*in_re(index+128) + in_im(index)*in_im(index+128); + corr_im = corr_im + in_re(index)*in_im(index+128) - in_im(index)*in_re(index+128); + end + abs_corr(n) = abs_corr(n) + corr_re*corr_re + corr_im*corr_im; + end + end + + % Find first and second max + abs_corr_idx = zeros(1,2); + for(m=0:1) + abs_corr_max = 0; + for(n=1:480) + if(abs_corr((m*480)+n) > abs_corr_max) + abs_corr_max = abs_corr((m*480)+n); + abs_corr_idx(m+1) = (m*480)+n; + end + end + end + + % Fine correlation and fraction frequency offset + abs_corr = zeros(1,960); + corr_freq_err = zeros(1,960); + for(slot=1:10) + for(idx=1:2) + if((abs_corr_idx(idx) - 40) < 1) + abs_corr_idx(idx) = 41; + end + if((abs_corr_idx(idx) + 40) > 960) + abs_corr_idx(idx) = 960 - 40; + end + for(n=abs_corr_idx(idx)-40:abs_corr_idx(idx)+40) + corr_re = 0; + corr_im = 0; + for(z=1:N_cp_l_else) + index = (slot*960) + n-1 + z; + corr_re = corr_re + in_re(index)*in_re(index+128) + in_im(index)*in_im(index+128); + corr_im = corr_im + in_re(index)*in_im(index+128) - in_im(index)*in_re(index+128); + end + abs_corr(n) = abs_corr(n) + corr_re*corr_re + corr_im*corr_im; + corr_freq_err(n) = corr_freq_err(n) + atan2(corr_im, corr_re)/(128*2*pi*(0.0005/960)); + end + end + end + + % Find first and second max + abs_corr_idx = zeros(1,2); + for(m=0:1) + abs_corr_max = 0; + for(n=1:480) + if(abs_corr((m*480)+n) > abs_corr_max) + abs_corr_max = abs_corr((m*480)+n); + abs_corr_idx(m+1) = (m*480)+n; + end + end + end + + % Determine frequency offset FIXME No integer offset is calculated here + freq_offset = (corr_freq_err(abs_corr_idx(1))/10 + corr_freq_err(abs_corr_idx(2))/10)/2;23 + + % Determine the symbol start locations from the correlation peaks + % FIXME Needs some work + tmp = abs_corr_idx(1); + while(tmp > 0) + tmp = tmp - 2192; + end + for(n=1:7) + coarse_start(n) = tmp + (n*2192); + end +end \ No newline at end of file diff --git a/matlab/sync/find_pss.m b/matlab/sync/find_pss.m new file mode 100644 index 000000000..8ed366e1b --- /dev/null +++ b/matlab/sync/find_pss.m @@ -0,0 +1,35 @@ +function [ fs eps p_m w2] = find_pss( x, N_id_2, doplot) + if nargin == 2 + doplot = false; + end + + c=lte_pss_zc(N_id_2); + cc=[zeros(33,1); c; zeros(33,1)]; + ccf=[0; cc(65:128); cc(2:64)]; + ccf=conj(ifft(ccf)); + + w2=conv(x,ccf); + if (doplot) + plot(abs(w2)) + end + [m i]=max(abs(w2)); + fs=i-960; + p_m = m/mean(abs(w2)); + if doplot + fprintf('Frame starts at %d, m=%g, p=%g, p/m=%g dB\n',fs, ... 
+ mean(abs(w2)), m, 10*log10(m/mean(abs(w2)))); + end + + % Estimate PSS-aided CFO +% if (i - 129) +% y=ccf.*x(i-128:i-1); +% +% y0=y(1:64); +% y1=y(65:length(y)); +% +% eps=angle(conj(sum(y0))*sum(y1))/pi; +% else + eps = NaN; +% end +end + diff --git a/matlab/sync/find_sss.m b/matlab/sync/find_sss.m new file mode 100644 index 000000000..a6fddd9c9 --- /dev/null +++ b/matlab/sync/find_sss.m @@ -0,0 +1,50 @@ +function [ m1 m2 out Zprod0 Zprod1] = find_sss( y, Nid_2,c0,c1) + + y=y((960-2*137+1):(960-137-9)); + yf=fft(y,128); + y=[yf(98:128) yf(2:32)]; + + + n=length(y); + Y0=y(1:2:n); + Y1=y(2:2:n); + + Z0=Y0.*c0; + nz=length(Z0); + + sm0=zeros(31,31); + sm1=zeros(31,31); + zm0=zeros(31,31); + + for i=1:31 + [sm0(i,:) zm0(i,:)]=compute_m0(i-1); + end + + Zprod0=Z0(2:nz).*conj(Z0(1:(nz-1))); + + sum0=zeros(31,1); + for m=1:31 + for i=2:31 + sum0(m)=sum0(m)+Z0(i)*conj(Z0(i-1))*sm0(m,i)*conj(sm0(m,i-1)); + end + end + + [mi1 i1]=max(abs(sum0)); + + Z1=Y1.*c1.*zm0(i1,:); + + Zprod1=Z1(2:nz).*conj(Z1(1:(nz-1))); + + sum1=zeros(31,1); + for m=1:31 + for i=2:31 + sum1(m)=sum1(m)+Z1(i)*conj(Z1(i-1))*sm0(m,i)*conj(sm0(m,i-1)); + end + end + [mi2 i2]=max(abs(sum1)); + + m1=i1; + m2=i2; + out=[sum0; sum1]; +end + diff --git a/matlab/sync/lte_generate_sss.m b/matlab/sync/lte_generate_sss.m new file mode 100644 index 000000000..d8b45e914 --- /dev/null +++ b/matlab/sync/lte_generate_sss.m @@ -0,0 +1,123 @@ +% +% Copyright 2011-2012 Ben Wojtowicz +% +% This program is free software: you can redistribute it and/or modify +% it under the terms of the GNU Affero General Public License as published by +% the Free Software Foundation, either version 3 of the License, or +% (at your option) any later version. +% +% This program is distributed in the hope that it will be useful, +% but WITHOUT ANY WARRANTY; without even the implied warranty of +% MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +% GNU Affero General Public License for more details. +% +% You should have received a copy of the GNU Affero General Public License +% along with this program. If not, see . 
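+%
+% Worked example of the m0/m1 derivation implemented below (informative):
+%   N_id_1 = 0  -> q'=0, q=0, m'=0,  giving m0 = 0, m1 = 1
+%   N_id_1 = 30 -> q'=1, q=1, m'=31, giving m0 = 0, m1 = 2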
+% +% Function: lte_generate_sss +% Description: Generates LTE secondary synchronization signals +% Inputs: N_id_1 - Physical layer cell identity group +% N_id_2 - Physical layer identity +% Outputs: sss_d_u_0 - The sequence d(n) used for the secondary +% synchronization signal, an interleaved +% concatenation of two length-31 binary +% sequences for subframe 0 +% Outputs: sss_d_u_5 - The sequence d(n) used for the secondary +% synchronization signal, an interleaved +% concatenation of two length-31 binary +% sequences for subframe 5 +% Spec: 3GPP TS 36.211 section 6.11.2.1 v10.1.0 +% Notes: None +% Rev History: Ben Wojtowicz 10/28/2011 Created +% Ben Wojtowicz 01/29/2012 Fixed license statement +% +function [sss_d_u_0, sss_d_u_5 c0 c1 m0 m1] = lte_generate_sss(N_id_1, N_id_2) + % Validate N_id_1 + if(~(N_id_1 >= 0 && N_id_1 <= 167)) + fprintf('ERROR: Invalid N_id_1 (%u)\n', N_id_1); + sss_d_u_0 = 0; + sss_d_u_5 = 0; + return; + end + + % Validate N_id_2 + if(~(N_id_2 >= 0 && N_id_2 <= 2)) + fprintf('ERROR: Invalid N_id_2 (%u)\n', N_id_2); + sss_d_u_0 = 0; + sss_d_u_5 = 0; + return; + end + + % Generate m0 and m1 + q_prime = floor(N_id_1/30); + q = floor((N_id_1 + (q_prime*(q_prime+1)/2))/30); + m_prime = N_id_1 + (q*(q+1)/2); + m0 = mod(m_prime, 31); + m1 = mod((m0 + floor(m_prime/31) + 1), 31); + + % Generate s_tilda + x_s_tilda(0+1) = 0; + x_s_tilda(1+1) = 0; + x_s_tilda(2+1) = 0; + x_s_tilda(3+1) = 0; + x_s_tilda(4+1) = 1; + for(i_hat=0:25) + x_s_tilda(i_hat+5+1) = mod((x_s_tilda(i_hat+2+1) + x_s_tilda(i_hat+1)), 2); + end + for(idx=0:30) + s_tilda(idx+1) = 1 - 2*x_s_tilda(idx+1); + end + + % Generate c_tilda + x_c_tilda(0+1) = 0; + x_c_tilda(1+1) = 0; + x_c_tilda(2+1) = 0; + x_c_tilda(3+1) = 0; + x_c_tilda(4+1) = 1; + for(i_hat=0:25) + x_c_tilda(i_hat+5+1) = mod((x_c_tilda(i_hat+3+1) + x_c_tilda(i_hat+1)), 2); + end + for(idx=0:30) + c_tilda(idx+1) = 1 - 2*x_c_tilda(idx+1); + end + + % Generate z_tilda + x_z_tilda(0+1) = 0; + x_z_tilda(1+1) = 0; + x_z_tilda(2+1) = 0; + x_z_tilda(3+1) = 0; + x_z_tilda(4+1) = 1; + for(i_hat=0:25) + x_z_tilda(i_hat+5+1) = mod((x_z_tilda(i_hat+4+1) + x_z_tilda(i_hat+2+1) + x_z_tilda(i_hat+1+1) + x_z_tilda(i_hat+1)), 2); + end + for(idx=0:30) + z_tilda(idx+1) = 1 - 2*x_z_tilda(idx+1); + end + + % Generate s0_m0 and s1_m1 + for(n=0:30) + s0_m0(n+1) = s_tilda(mod(n + m0, 31)+1); + s1_m1(n+1) = s_tilda(mod(n + m1, 31)+1); + end + + % Generate c0 and c1 + for(n=0:30) + c0(n+1) = c_tilda(mod(n + N_id_2, 31)+1); + c1(n+1) = c_tilda(mod(n + N_id_2 + 3, 31)+1); + end + + % Generate z1_m0 and z1_m1 + for(n=0:30) + z1_m0(n+1) = z_tilda(mod(n + mod(m0, 8), 31)+1); + z1_m1(n+1) = z_tilda(mod(n + mod(m1, 8), 31)+1); + end + + % Generate SSS + for(n=0:30) + sss_d_u_0(2*n+1) = s0_m0(n+1) * c0(n+1); + sss_d_u_5(2*n+1) = s1_m1(n+1) * c0(n+1); + + sss_d_u_0(2*n+1+1) = s1_m1(n+1) * c1(n+1) * z1_m0(n+1); + sss_d_u_5(2*n+1+1) = s0_m0(n+1) * c1(n+1) * z1_m1(n+1); + end +end diff --git a/matlab/sync/lte_pss_zc.m b/matlab/sync/lte_pss_zc.m new file mode 100644 index 000000000..4bacf1917 --- /dev/null +++ b/matlab/sync/lte_pss_zc.m @@ -0,0 +1,26 @@ +function[a]=lte_pss_zc(cell_id) +% Function returns 1 out of 3 possible Zadoff-Chu sequences used in LTE. +% zadoff_chu element 32 left out. 
corresponds to DC carrier and is +% therefore not transmitted + +Nzc=62; +u=0; +if cell_id==0 + u=25; +end +if cell_id==1 + u=29; +end +if cell_id==2 + u=34; +end + +a = zeros(Nzc,1); +for n=0:30 + a(n+1)=exp(complex(0,-1)*pi*u*n*(n+1)/63); +end +for n=31:61 + a(n+1)=exp(complex(0,-1)*pi*u*(n+1)*(n+2)/63); +end + +end diff --git a/matlab/sync/sfo_estimate.m b/matlab/sync/sfo_estimate.m new file mode 100644 index 000000000..d9f46690f --- /dev/null +++ b/matlab/sync/sfo_estimate.m @@ -0,0 +1,7 @@ +function [ sfo ] = sfo_estimate( fs, T ) + +sfo = 0; +for i=2:length(fs) + sfo=sfo + (fs(i)-fs(i-1))/length(fs)/T; +end + diff --git a/matlab/sync/sym_sync_cp.m b/matlab/sync/sym_sync_cp.m new file mode 100644 index 000000000..a122ef182 --- /dev/null +++ b/matlab/sync/sym_sync_cp.m @@ -0,0 +1,13 @@ +function [ fs ] = sym_sync_cp( x, s ) + +lambda=zeros(1,length(x)-138); +for theta=1:length(lambda) + lambda(theta)=cp_corr(x,theta,128,9,s); +end + +subplot(1,2,1) +plot(abs(lambda)) +subplot(1,2,2) +plot(angle(lambda)) +[m i] = max(abs(lambda)) + diff --git a/matlab/sync/test.m b/matlab/sync/test.m new file mode 100644 index 000000000..33d3d6e92 --- /dev/null +++ b/matlab/sync/test.m @@ -0,0 +1,35 @@ +N=128; %128 subcarries +M=16; %QAM order +cp=9; %length of the cyclic prefix... Is increasing the cyclic prefix size gonna increase the efficiency? +scale = 1/sqrt(10); +hMod = modem.qammod(M); %QAM Modulator +hDemod = modem.qamdemod(hMod); %QAM demodulator +loops = 10; +SNR =0:5:35; +t1= cputime ; +% transmited signal. Contains N data points ranging from 0 to M-1 +ber=zeros(5,length(SNR)); +%% Creating the Rayleigh Multipath Channels +Ch = rayleighchan(1/1000,10); +Ch.ResetBeforeFiltering = 0; +sig = 1i*ones(loops,1); +h1 = filter(Ch,sig); +h2 = 0.1072*filter(Ch,sig); +h3 = 0.0120*filter(Ch,sig); +h4 = 0.0052*filter(Ch,sig); +% Delay Values +l1 = 4; +l2 = 7; +l3= 16; +%% +ofdm_cp=[]; + %tx=transmited_data; + for ik=1:loops%number of loops + tx = randi([0 M-1],1,N); % generate random data + sig=modulate(hMod, tx)*scale; % Modulate QAM modulated signal, devide by the square root of 10 to bring the average power of the signal to 1 + ofdm=sqrt(N).*ifft(sig,N); % generate OFDM signal IFFT on the parrellel data,multiply by sqrt(N) to adjust to the matlab computation , + ofdm_cp = [ofdm_cp ofdm(N-cp+1:N) ofdm]; % Add cyclic prefix + + end + + \ No newline at end of file diff --git a/scripts/binsource.h b/scripts/binsource.h new file mode 100644 index 000000000..f64638cb3 --- /dev/null +++ b/scripts/binsource.h @@ -0,0 +1,36 @@ +#include + +/* Low-level API */ +typedef struct { + unsigned int seed; + uint32_t *seq_buff; + int seq_buff_nwords; + int seq_cache_nbits; + int seq_cache_rp; +}binsource_t; + +void binsource_init(binsource_t* q); +void binsource_destroy(binsource_t* q); +void binsource_seed_set(binsource_t* q, unsigned int seed); +void binsource_seed_time(binsource_t *q); +int binsource_cache_gen(binsource_t* q, int nbits); +void binsource_cache_cpy(binsource_t* q, uint8_t *bits, int nbits); +int binsource_generate(binsource_t* q, uint8_t *bits, int nbits); + + +/* High-level API */ +typedef struct { + binsource_t obj; + struct binsource_init { + int cache_seq_nbits; /* default=2 */ + int seed; + } init; + struct binsource_ctrl_in { + int nbits; + } ctrl_in; + uint8_t* output[2]; /* size=2048*14 */ + int* out_len; +}binsource_hl; + +int binsource_initialize(binsource_hl* h); +int binsource_work( binsource_hl* hl); diff --git a/scripts/lib_binsource/CMakeLists.txt b/scripts/lib_binsource/CMakeLists.txt 
new file mode 100644 index 000000000..40fd36b4a --- /dev/null +++ b/scripts/lib_binsource/CMakeLists.txt @@ -0,0 +1,94 @@ +# This configuration is for the aloe++ skeleton + +# set-up the program libraries here +set(LIBRARIES m rt osld) + +# set-up program includes here +include_directories(/usr/local/include/) + +############## DO NOT NEED TO MODIFY BEYOND HERE + +get_filename_component(module ${CMAKE_CURRENT_SOURCE_DIR} NAME) + +if( CMAKE_SOURCE_DIR STREQUAL CMAKE_CURRENT_SOURCE_DIR ) + cmake_minimum_required (VERSION 2.6) + project (${module}) + + # The version number. + set (OECORE_VERSION_MAJOR 1) + set (OECORE_VERSION_MINOR 0) + set(MODULE_REPOS_NAME "default") + +else() + include_directories(${OESR_INCLUDE}) +endif() + + +file(GLOB_RECURSE SOURCES "src/*.c") +file(GLOB_RECURSE TEST_SOURCES "test/*.c") + +include_directories(${CMAKE_CURRENT_SOURCE_DIR}/src) + +# aloe module +add_library(${module}-aloe SHARED ${SOURCES}) +set_target_properties(${module}-aloe PROPERTIES OUTPUT_NAME ${module}) +set_target_properties(${module}-aloe PROPERTIES COMPILE_FLAGS "-D_COMPILE_ALOE") +target_link_libraries(${module}-aloe oesrapi skeleton ${LIBRARIES}) +install(TARGETS ${module}-aloe DESTINATION lib/${MODULE_REPOS_NAME}/) + + +if (NOT ${TEST_SOURCES} STREQUAL "") + # standalone program for testing + add_executable(${module}-bin ${SOURCES} ${TEST_SOURCES}) + set_target_properties(${module}-bin PROPERTIES OUTPUT_NAME ${module}) + set_target_properties(${module}-bin PROPERTIES COMPILE_FLAGS "-D_COMPILE_STANDALONE") + target_link_libraries(${module}-bin standalone ${LIBRARIES}) + install(TARGETS ${module}-bin DESTINATION bin) +endif() + +# octave mex file +set(install_mex "") +if(NOT $ENV{OCTAVE_INCLUDE} STREQUAL "") + if(NOT $ENV{OCTAVE_LIBS} STREQUAL "") + + add_library(${module}-oct SHARED ${SOURCES}) + set_target_properties(${module}-oct PROPERTIES OUTPUT_NAME ${module}) + set_target_properties(${module}-oct PROPERTIES PREFIX "am_") + set_target_properties(${module}-oct PROPERTIES SUFFIX .mex) + + set_target_properties(${module}-oct PROPERTIES COMPILE_FLAGS "-I$ENV{OCTAVE_INCLUDE} -D_COMPILE_MEX -Wl,-Bsymbolic -L$ENV{OCTAVE_LIBS} -loctinterp -loctave -lcruft -Wl,-Bsymbolic-functions -Wl,-z,relro") + target_link_libraries(${module}-oct aloe_octave ${LIBRARIES}) + install(TARGETS ${module}-oct DESTINATION mex) + + endif() +endif() + +#matlab mex +if(NOT $ENV{MATLAB_ROOT} STREQUAL "") + add_library(${module}-mat SHARED ${SOURCES}) + set_target_properties(${module}-mat PROPERTIES OUTPUT_NAME ${module}) + set_target_properties(${module}-mat PROPERTIES PREFIX "am_") + + if(CMAKE_SIZEOF_VOID_P EQUAL 8) + set_target_properties(${module}-mat PROPERTIES SUFFIX .mexa64) + set_target_properties(${module}-mat PROPERTIES COMPILE_FLAGS "-I$ENV{MATLAB_ROOT} -O -pthread -shared -Wl,--version-script,$ENV{MATLAB_ROOT}/extern/lib/glnxa64/mexFunction.map -Wl,--no-undefined -Wl,-rpath-link,$ENV{MATLAB_ROOT}/bin/glnxa64 -L$ENV{MATLAB_ROOT}/bin/glnxa64 -lmx -lmex -lmat -lm -lstdc++") + else() + set_target_properties(${module}-mat PROPERTIES SUFFIX .mexglx) + set_target_properties(${module}-mat PROPERTIES COMPILE_FLAGS "-I$ENV{MATLAB_ROOT} -O -pthread -shared -m32 -Wl,--version-script,$ENV{MATLAB_ROOT}/extern/lib/glnx86/mexFunction.map -Wl,--no-undefined -Wl,-rpath-link,$ENV{MATLAB_ROOT}/bin/glnx86 -L$ENV{MATLAB_ROOT}/bin/glnx86 -lmx -lmex -lmat -lm -lstdc++") + endif() + + target_link_libraries(${module}-mat aloe_matlab ${LIBRARIES}) + install(TARGETS ${module}-mat DESTINATION mex) + +endif() + + + + + + + + + + + diff 
--git a/scripts/lib_binsource/src/binsource.c b/scripts/lib_binsource/src/binsource.c new file mode 100644 index 000000000..6fa811ba9 --- /dev/null +++ b/scripts/lib_binsource/src/binsource.c @@ -0,0 +1,81 @@ +/* + * This file has been automatically generated from binsource + */ + +#include +#include +#include +#include + +#include "binsource.h" + +binsource_hl binsource; + +pmid_t nbits_id; + +int out_len[NOF_OUTPUT_ITF]; + +int initialize() { + + /* Initialization Parameters */ + if (param_get_int_name("cache_seq_nbits", &binsource.init.cache_seq_nbits)) { + binsource.init.cache_seq_nbits = 2; + } + if (param_get_int_name("seed", &binsource.init.seed)) { + binsource.init.seed = 0; + } + + /* Input Control Parameters */ + nbits_id = param_id("nbits"); + + /* Initialization function */ + return binsource_initialize(&binsource); +} + + +int work(void **inp, void **out) { + int i,n; +#if NOF_INPUTS>1 + for (i=0;i1 + for (i=0;i + +typedef uint8_t output_t; + +#define INPUT_MAX_SAMPLES 0 +#define OUTPUT_MAX_SAMPLES 2048*14 + +#define NOF_INPUT_ITF 0 +#define NOF_OUTPUT_ITF 2 + +#endif +/**@} */ + +#define GENERATE_COMPLEX + +#ifndef INCLUDE_DEFS_ONLY + +/* Input and output buffer sizes (in number of samples) */ +const int input_max_samples = INPUT_MAX_SAMPLES; +const int output_max_samples = OUTPUT_MAX_SAMPLES; + +/* leave these two lines unmodified */ +const int input_sample_sz = sizeof(input_t); +int output_sample_sz = sizeof(output_t); + +/* Number of I/O interfaces. All have the same maximum size */ +const int nof_input_itf = NOF_INPUT_ITF; +const int nof_output_itf = NOF_OUTPUT_ITF; + +#endif diff --git a/scripts/lib_binsource/test/test_generate.c b/scripts/lib_binsource/test/test_generate.c new file mode 100644 index 000000000..8a2c6185c --- /dev/null +++ b/scripts/lib_binsource/test/test_generate.c @@ -0,0 +1,73 @@ +/* + * Copyright (c) 2012, Ismael Gomez-Miguelez . + * This file is part of ALOE++ (http://flexnets.upc.edu/) + * + * ALOE++ is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * ALOE++ is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with ALOE++. If not, see . + */ + +/* Functions that generate the test data fed into the DSP modules being developed */ +#include +#include +#include + +#include +#include + +#define INCLUDE_DEFS_ONLY +#include "binsource.h" + +int offset=0; + +/** + * Generates input signal. VERY IMPORTANT to fill length vector with the number of + * samples that have been generated. + * @param inp Input interface buffers. Data from other interfaces is stacked in the buffer. + * Use in(ptr,idx) to access the address. 
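+ *        (in(ptr,idx) is presumably the addressing macro supplied by the ALOE
+ *        skeleton headers; for a module with a single input interface it
+ *        should simply yield the start of the buffer.)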
+ * + * @param lengths Save on n-th position the number of samples generated for the n-th interface + */ +int generate_input_signal(void *in, int *lengths) +{ + int i; + input_t *input = in; + int block_length; + pmid_t blen_id; + + blen_id = param_id("block_length"); + if (!blen_id) { + moderror("Parameter block_length not found\n"); + return -1; + } + + if (!param_get_int(blen_id,&block_length)) { + moderror("Getting integer parameter block_length\n"); + return -1; + } + + modinfo_msg("Parameter block_length is %d\n",block_length); + + /** HERE INDICATE THE LENGTH OF THE SIGNAL */ + lengths[0] = block_length; + + for (i=0;i -o ' + sys.exit(2) +for opt, arg in opts: + if opt == '-h': + print argv[0] + ' -i -o ' + sys.exit() + elif opt in ("-i", "--input_file"): + input_file = arg + elif opt in ("-o", "--output_dir"): + output_dir = arg + +if input_file == None or output_dir == None: + print argv[0] + ' -i -o ' + sys.exit(2) + +filename=os.path.basename(input_file).split('.')[0] + +print filename + '\n' +print input_file + '\n' +print output_dir + '\n' + #m = Module("binsource") + + #m.readHeader(input_file) + #MakeModule(m,output_dir) + #print m.toString() + diff --git a/scripts/mod2xml.py b/scripts/mod2xml.py new file mode 100644 index 000000000..5203bd656 --- /dev/null +++ b/scripts/mod2xml.py @@ -0,0 +1,9 @@ +#!/usr/bin/env python + +from module import Module +from xml2aloe import MakeModule + +m = Module("binsource") +m.readHeader('/home/ismael/work/osld-lib/scripts/binsource.h') +MakeModule(m) +print m.toString() \ No newline at end of file diff --git a/scripts/module.py b/scripts/module.py new file mode 100644 index 000000000..750798ff2 --- /dev/null +++ b/scripts/module.py @@ -0,0 +1,291 @@ + + +from xml.dom.minidom import Document,parseString +from pyclibrary import CParser + +class Module: +# name + nof_inputs=0 + nof_outputs=0 + init_params = [] + input_params = [] + output_params = [] + + def __init__(self,name): + self.name=name + self.init_func=name+'_initialize' + self.work_func=name+'_work' + self.handler=name+'_hl' + self.handler_instance=name + self.init_pm_struct=name+'_init' + self.input_pm_struct=name+'_ctrl_in' + self.output_pm_struct=name+'_ctrl_out' + self.directory_name = 'lib_' + name + + def set_input(self,nof_inputs, input_type, input_size): + self.nof_inputs = nof_inputs + self.input_type = input_type + self.input_size = input_size + + def set_output(self,nof_outputs, output_type, output_size): + self.nof_outputs = nof_outputs + self.output_type = output_type + self.output_size = output_size + + def add_init_param(self,name,type,default): + p={'name':name,'variable':self.name+'.init.'+name,'type':type,'default':default} + self.init_params.append(p) + + def add_input_param(self,name,type,default): + p={'name':name,'variable':self.name+'.ctrl_in.'+name,'type':type,'default':default} + self.input_params.append(p) + + def add_output_param(self,name,type,default): + p={'name':name,'variable':self.name+'.ctrl_in.'+name,'type':type,'default':default} + self.output_params.append(p) + + def toString(self): + s = 'name: ' + self.name + '\n' + s = s + 'handler: ' + self.handler + '\n' + if self.nof_inputs > 0: + s = s + str(self.nof_inputs) + ' ' + self.input_type + ' inputs of size ' + str(self.output.size) + '\n' + else: + s = s + 'no inputs\n' + if self.nof_outputs > 0: + s = s + str(self.nof_outputs) + ' ' + self.output_type + ' outputs of size ' + str(self.output_size) + '\n' + else: + s = s + 'no outputs\n' + + if self.init_params: + s = s + 'Initialization 
parameters:\n' + for p in self.init_params: + s = s + ' - ' + p['type'] + ' ' + p['name'] + ' = ' + p['default'] + '\n' + + if self.input_params: + s = s + 'Input parameters:\n' + for p in self.input_params: + s = s + ' - ' + p['type'] + ' ' + p['name'] + if p['default'] == None: + s = s + ' (Mandatory)\n' + else: + s = s + ' = ' + p['default'] + '\n' + + if self.output_params: + s = s + 'Output parameters:\n' + for p in self.output_params: + s = s + ' - ' + p['type'] + ' ' + p['name'] + '\n' + return s + + def toXML(self): + root = Document() + + module = root.createElement('module') + root.appendChild(module) + + module.setAttribute("name",self.name) + module.setAttribute("handler",self.handler) + module.setAttribute("handler_instance",self.handler_instance) + + # Functions + functions = root.createElement("functions") + module.appendChild(functions) + functions.setAttribute("initialize",self.init_func) + functions.setAttribute("work",self.work_func) + + # Interfaces + inputs = root.createElement("inputs") + module.appendChild(inputs) + inputs.setAttribute("num",str(self.nof_inputs)) + inputs.setAttribute("type",self.input_type) + inputs.setAttribute("size",self.input_size) + + outputs = root.createElement("outputs") + module.appendChild(outputs) + outputs.setAttribute("num",str(self.nof_outputs)) + outputs.setAttribute("type",self.output_type) + outputs.setAttribute("size",self.output_size) + + # Init Parameters + pinit = root.createElement("init_parameters") + module.appendChild(pinit) + for p in self.init_params: + pi = root.createElement("param") + pinit.appendChild(pi) + pi.setAttribute("name",p['name']) + pi.setAttribute("var",p['variable']) + pi.setAttribute("type",p['type']) + pi.setAttribute("default",p['default']) + + pinput = root.createElement("input_parameters") + module.appendChild(pinput) + for p in self.input_params: + pi = root.createElement("param") + pinput.appendChild(pi) + pi.setAttribute("name",p['name']) + pi.setAttribute("var",p['variable']) + pi.setAttribute("type",p['type']) + pi.setAttribute("default",p['default']) + + poutput = root.createElement("output_parameters") + module.appendChild(poutput) + for p in self.input_params: + pi = root.createElement("param") + pinput.appendChild(pi) + pi.setAttribute("name",p['name']) + pi.setAttribute("var",p['variable']) + pi.setAttribute("type",p['type']) + + + return root.toprettyxml() + + def readXML(self, string): + root = parseString(string) + module = root.getElementsByTagName("module").item(0) + + self.name = module.getAttribute("name") + self.handler = module.getAttribute("handler") + self.handler_instance = module.getAttribute("handler_instance") + + functions = root.getElementsByTagName("functions").item(0) + self.init_func = functions.getAttribute("initialize") + self.work_func = functions.getAttribute("work") + + # Interfaces + inputs = root.getElementsByTagName("inputs").item(0) + self.nof_inputs = int(inputs.getAttribute("num")) + self.input_type = inputs.getAttribute("type") + self.input_size = inputs.getAttribute("size") + + outputs = root.getElementsByTagName("outputs").item(0) + self.nof_outputs = int(outputs.getAttribute("num")) + self.output_type = outputs.getAttribute("type") + self.output_size = outputs.getAttribute("size") + + pinit = root.getElementsByTagName("init_parameters").item(0) + for p in pinit.getElementsByTagName("params"): + self.init_params.appendChild({'name':p.getAttribute("name"),\ + 'variable':p.getAttribute("variable"), \ + 'type':p.getAttribute("type"),\ + 
'default':p.getAttribute("default")}) + + pinput = root.getElementsByTagName("input_parameters").item(0) + for p in pinput.getElementsByTagName("params"): + self.input_params.appendChild({'name':p.getAttribute("name"),\ + 'variable':p.getAttribute("variable"),\ + 'type':p.getAttribute("type"),\ + 'default':p.getAttribute("default")}) + + poutput = root.getElementsByTagName("output_parameters").item(0) + for p in poutput.getElementsByTagName("params"): + self.output_params.appendChild({'name':p.getAttribute("name"),\ + 'variable':p.getAttribute("variable"),\ + 'type':p.getAttribute("type")}) + def findMember(self,members, name): + for m in members: + if m[0] == name: + return m + return None + + def findDefault(self, lines, variable): + for line in lines: + if variable in line: + if 'default' in line.lower(): + return str(int(line.split('=')[1].split('*/')[0])) + else: + return None + + def findSize(self, lines, variable): + for line in lines: + if variable in line: + if 'size' in line.lower(): + return line.split('=')[1].split('*/')[0] + else: + return None + + def findLinesStruct(self, lines, struct_name): + slines = [] + state = "nf" + for line in lines: + if state == 'nf': + if 'struct ' + struct_name in line: + state = "f1" + elif state == 'f1': + if '}' in line: + state = 'f2' + return slines + else: + slines.append(line) + + def findLinesHandler(self,file_name): + with open(file_name,'r') as f: + slines = [] + state = "nf" + for line in reversed(f.readlines()): + if state == 'nf': + if self.handler in line and '}' in line: + state = "f1" + elif state == 'f1': + if 'typedef' in line and 'struct' in line: + state = 'f2' + return reversed(slines) + else: + slines.append(line) + + def readHeader(self, file_name): + + p = CParser([file_name]) + h = p.defs['structs'][p.defs['types'][p.defs['types'][self.handler][0]][1]]['members'] + + input = self.findMember(h,'input') + if input == None: + self.nof_inputs = 0 + self.input_type = None + self.input_size = 0 + else: + self.nof_inputs = input[1][2][0] + self.input_type = input[1][0] + size = self.findSize(self.findLinesHandler(file_name), 'input') + if size == None: + size = '2048*20' + self.input_size = size + + output = self.findMember(h,'output') + if output == None: + self.nof_outputs = 0 + self.output_type = None + self.output_size = 0 + else: + self.nof_outputs = output[1][2][0] + self.output_type = output[1][0] + size = self.findSize(self.findLinesHandler(file_name), 'output') + if size == None: + size = '2048*20' + self.output_size = size + + initpm = p.defs['structs'].get(self.init_pm_struct) + if (initpm != None): + for m in initpm['members']: + default = self.findDefault(self.findLinesStruct(\ + self.findLinesHandler(file_name),\ + self.init_pm_struct), m[0]) + if default == None: + default = '0' + self.init_params.append({'name':m[0],'variable':self.name+'.init.'+m[0],\ + 'type':m[1][0],'default':default}) + + + inputpm = p.defs['structs'].get(self.input_pm_struct) + if (inputpm != None): + for m in inputpm['members']: + default = self.findDefault(self.findLinesStruct(\ + self.findLinesHandler(file_name),\ + self.input_pm_struct), m[0]) + self.input_params.append({'name':m[0],'variable':self.name+'.ctrl_in.'+m[0],\ + 'type':m[1][0],'default':default}) + + outputpm = p.defs['structs'].get(self.output_pm_struct) + if (outputpm != None): + for m in outputpm['members']: + self.output_params.append({'name':m[0],'variable':self.name+'.ctrl_out.'+m[0],\ + 'type':m[1][0]}) + \ No newline at end of file diff --git a/scripts/module.pyc 
b/scripts/module.pyc new file mode 100644 index 000000000..02ca6586f Binary files /dev/null and b/scripts/module.pyc differ diff --git a/scripts/module/__init__.py b/scripts/module/__init__.py new file mode 100644 index 000000000..776ab89f1 --- /dev/null +++ b/scripts/module/__init__.py @@ -0,0 +1,299 @@ + + +from xml.dom.minidom import Document,parseString +from pyclibrary import CParser + +class Module: +# name + nof_inputs=0 + nof_outputs=0 + init_params = [] + input_params = [] + output_params = [] + + def __init__(self,name): + self.name=name + self.init_func=name+'_initialize' + self.work_func=name+'_work' + self.handler=name+'_hl' + self.handler_instance=name + self.init_pm_struct=name+'_init' + self.input_pm_struct=name+'_ctrl_in' + self.output_pm_struct=name+'_ctrl_out' + self.directory_name = 'lib_' + name + self.library_name = 'osld' + + def set_input(self,nof_inputs, input_type, input_size): + self.nof_inputs = nof_inputs + self.input_type = input_type + self.input_size = input_size + + def set_output(self,nof_outputs, output_type, output_size): + self.nof_outputs = nof_outputs + self.output_type = output_type + self.output_size = output_size + + def add_init_param(self,name,type,default): + p={'name':name,'variable':self.name+'.init.'+name,'type':type,'default':default} + self.init_params.append(p) + + def add_input_param(self,name,type,default): + p={'name':name,'variable':self.name+'.ctrl_in.'+name,'type':type,'default':default} + self.input_params.append(p) + + def add_output_param(self,name,type,default): + p={'name':name,'variable':self.name+'.ctrl_in.'+name,'type':type,'default':default} + self.output_params.append(p) + + def toString(self): + s = 'name: ' + self.name + '\n' + s = s + 'handler: ' + self.handler + '\n' + s = s + 'directory: ' + self.directory_name + '\n' + s = s + 'library name: ' + self.library_name + '\n' + + if self.nof_inputs > 0: + s = s + str(self.nof_inputs) + ' ' + self.input_type + ' inputs of size ' + str(self.output.size) + '\n' + else: + s = s + 'no inputs\n' + if self.nof_outputs > 0: + s = s + str(self.nof_outputs) + ' ' + self.output_type + ' outputs of size ' + str(self.output_size) + '\n' + else: + s = s + 'no outputs\n' + + if self.init_params: + s = s + 'Initialization parameters:\n' + for p in self.init_params: + s = s + ' - ' + p['type'] + ' ' + p['name'] + ' = ' + p['default'] + '\n' + + if self.input_params: + s = s + 'Input parameters:\n' + for p in self.input_params: + s = s + ' - ' + p['type'] + ' ' + p['name'] + if p['default'] == None: + s = s + ' (Mandatory)\n' + else: + s = s + ' = ' + p['default'] + '\n' + + if self.output_params: + s = s + 'Output parameters:\n' + for p in self.output_params: + s = s + ' - ' + p['type'] + ' ' + p['name'] + '\n' + return s + + def toXML(self): + root = Document() + + module = root.createElement('module') + root.appendChild(module) + + module.setAttribute("name",self.name) + module.setAttribute("handler",self.handler) + module.setAttribute("handler_instance",self.handler_instance) + module.setAttribute("library",self.library_name) + module.setAttribute("directory_name",self.directory_name) + + # Functions + functions = root.createElement("functions") + module.appendChild(functions) + functions.setAttribute("initialize",self.init_func) + functions.setAttribute("work",self.work_func) + + # Interfaces + inputs = root.createElement("inputs") + module.appendChild(inputs) + inputs.setAttribute("num",str(self.nof_inputs)) + inputs.setAttribute("type",self.input_type) + 
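# A minimal stand-alone sketch of the xml.dom.minidom pattern used by
# Module.toXML() (module and parameter names here are hypothetical):
from xml.dom.minidom import Document

doc = Document()
mod = doc.createElement('module')
doc.appendChild(mod)
mod.setAttribute('name', 'binsource')
pinit = doc.createElement('init_parameters')
mod.appendChild(pinit)
par = doc.createElement('param')
pinit.appendChild(par)
par.setAttribute('name', 'block_length')
par.setAttribute('var', 'binsource.init.block_length')
print(doc.toprettyxml())
# toprettyxml() emits an XML declaration followed by, roughly:
#   <module name="binsource">
#     <init_parameters>
#       <param name="block_length" var="binsource.init.block_length"/>
#     </init_parameters>
#   </module>
# Note that toXML() creates <param> elements while readXML() queries
# getElementsByTagName("params") and calls appendChild() on plain Python
# lists, so round-tripping the document would need both details adjusted.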
inputs.setAttribute("size",self.input_size) + + outputs = root.createElement("outputs") + module.appendChild(outputs) + outputs.setAttribute("num",str(self.nof_outputs)) + outputs.setAttribute("type",self.output_type) + outputs.setAttribute("size",self.output_size) + + # Init Parameters + pinit = root.createElement("init_parameters") + module.appendChild(pinit) + for p in self.init_params: + pi = root.createElement("param") + pinit.appendChild(pi) + pi.setAttribute("name",p['name']) + pi.setAttribute("var",p['variable']) + pi.setAttribute("type",p['type']) + pi.setAttribute("default",p['default']) + + pinput = root.createElement("input_parameters") + module.appendChild(pinput) + for p in self.input_params: + pi = root.createElement("param") + pinput.appendChild(pi) + pi.setAttribute("name",p['name']) + pi.setAttribute("var",p['variable']) + pi.setAttribute("type",p['type']) + pi.setAttribute("default",p['default']) + + poutput = root.createElement("output_parameters") + module.appendChild(poutput) + for p in self.input_params: + pi = root.createElement("param") + pinput.appendChild(pi) + pi.setAttribute("name",p['name']) + pi.setAttribute("var",p['variable']) + pi.setAttribute("type",p['type']) + + + return root.toprettyxml() + + def readXML(self, string): + root = parseString(string) + module = root.getElementsByTagName("module").item(0) + + self.name = module.getAttribute("name") + self.handler = module.getAttribute("handler") + self.handler_instance = module.getAttribute("handler_instance") + self.directory_name = module.getAttribute("directory_name") + self.library_name = module.getAttribute("library") + + functions = root.getElementsByTagName("functions").item(0) + self.init_func = functions.getAttribute("initialize") + self.work_func = functions.getAttribute("work") + + # Interfaces + inputs = root.getElementsByTagName("inputs").item(0) + self.nof_inputs = int(inputs.getAttribute("num")) + self.input_type = inputs.getAttribute("type") + self.input_size = inputs.getAttribute("size") + + outputs = root.getElementsByTagName("outputs").item(0) + self.nof_outputs = int(outputs.getAttribute("num")) + self.output_type = outputs.getAttribute("type") + self.output_size = outputs.getAttribute("size") + + pinit = root.getElementsByTagName("init_parameters").item(0) + for p in pinit.getElementsByTagName("params"): + self.init_params.appendChild({'name':p.getAttribute("name"),\ + 'variable':p.getAttribute("variable"), \ + 'type':p.getAttribute("type"),\ + 'default':p.getAttribute("default")}) + + pinput = root.getElementsByTagName("input_parameters").item(0) + for p in pinput.getElementsByTagName("params"): + self.input_params.appendChild({'name':p.getAttribute("name"),\ + 'variable':p.getAttribute("variable"),\ + 'type':p.getAttribute("type"),\ + 'default':p.getAttribute("default")}) + + poutput = root.getElementsByTagName("output_parameters").item(0) + for p in poutput.getElementsByTagName("params"): + self.output_params.appendChild({'name':p.getAttribute("name"),\ + 'variable':p.getAttribute("variable"),\ + 'type':p.getAttribute("type")}) + def findMember(self,members, name): + for m in members: + if m[0] == name: + return m + return None + + def findDefault(self, lines, variable): + for line in lines: + if variable in line: + if 'default' in line.lower(): + return str(int(line.split('=')[1].split('*/')[0])) + else: + return None + + def findSize(self, lines, variable): + for line in lines: + if variable in line: + if 'size' in line.lower(): + return line.split('=')[1].split('*/')[0] + else: + 
return None + + def findLinesStruct(self, lines, struct_name): + slines = [] + state = "nf" + for line in lines: + if state == 'nf': + if 'struct ' + struct_name in line: + state = "f1" + elif state == 'f1': + if '}' in line: + state = 'f2' + return slines + else: + slines.append(line) + + def findLinesHandler(self,file_name): + with open(file_name,'r') as f: + slines = [] + state = "nf" + for line in reversed(f.readlines()): + if state == 'nf': + if self.handler in line and '}' in line: + state = "f1" + elif state == 'f1': + if 'typedef' in line and 'struct' in line: + state = 'f2' + return reversed(slines) + else: + slines.append(line) + + def readHeader(self, file_name): + + p = CParser([file_name]) + h = p.defs['structs'][p.defs['types'][p.defs['types'][self.handler][0]][1]]['members'] + + input = self.findMember(h,'input') + if input == None: + self.nof_inputs = 0 + self.input_type = None + self.input_size = 0 + else: + self.nof_inputs = input[1][2][0] + self.input_type = input[1][0] + size = self.findSize(self.findLinesHandler(file_name), 'input') + if size == None: + size = '2048*20' + self.input_size = size + + output = self.findMember(h,'output') + if output == None: + self.nof_outputs = 0 + self.output_type = None + self.output_size = 0 + else: + self.nof_outputs = output[1][2][0] + self.output_type = output[1][0] + size = self.findSize(self.findLinesHandler(file_name), 'output') + if size == None: + size = '2048*20' + self.output_size = size + + initpm = p.defs['structs'].get(self.init_pm_struct) + if (initpm != None): + for m in initpm['members']: + default = self.findDefault(self.findLinesStruct(\ + self.findLinesHandler(file_name),\ + self.init_pm_struct), m[0]) + if default == None: + default = '0' + self.init_params.append({'name':m[0],'variable':self.name+'.init.'+m[0],\ + 'type':m[1][0],'default':default}) + + + inputpm = p.defs['structs'].get(self.input_pm_struct) + if (inputpm != None): + for m in inputpm['members']: + default = self.findDefault(self.findLinesStruct(\ + self.findLinesHandler(file_name),\ + self.input_pm_struct), m[0]) + self.input_params.append({'name':m[0],'variable':self.name+'.ctrl_in.'+m[0],\ + 'type':m[1][0],'default':default}) + + outputpm = p.defs['structs'].get(self.output_pm_struct) + if (outputpm != None): + for m in outputpm['members']: + self.output_params.append({'name':m[0],'variable':self.name+'.ctrl_out.'+m[0],\ + 'type':m[1][0]}) + \ No newline at end of file diff --git a/scripts/module/__init__.pyc b/scripts/module/__init__.pyc new file mode 100644 index 000000000..40904f373 Binary files /dev/null and b/scripts/module/__init__.pyc differ diff --git a/scripts/module/pyclibrary/CLibrary.py b/scripts/module/pyclibrary/CLibrary.py new file mode 100644 index 000000000..17424bc4e --- /dev/null +++ b/scripts/module/pyclibrary/CLibrary.py @@ -0,0 +1,501 @@ +# -*- coding: utf-8 -*- +""" +CLibrary.py - Provides CLibrary class +Copyright 2010 Luke Campagnola +Distributed under MIT/X11 license. See license.txt for more infomation. + +Proxy to both CHeader and ctypes, allowing automatic type conversion and +function calling based on C header definitions. +""" + + +from ctypes import * +import sys + + +class CLibrary: + """The CLibrary class is intended to automate much of the work in using ctypes by integrating + header file definitions from CParser. 
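# For reference, readHeader() above resolves the handler typedef through
# CParser's definition tables.  Assuming a (hypothetical) header declaring
#
#   typedef struct binsource_ {
#       input_t  *input[2];
#       output_t *output[2];
#   } binsource_hl;
#
# the lookup chain works out roughly as:
#   p.defs['types']['binsource_hl']       -> ['struct binsource_']
#   p.defs['types']['struct binsource_']  -> ('struct', 'binsource_')
#   p.defs['structs']['binsource_']['members'][0]
#                                         -> ('input', ['input_t', '*', [2]], None)
# so nof_inputs = member[1][2][0] = 2 and input_type = member[1][0] = 'input_t',
# while input_size falls back to '2048*20' unless findSize()/findLinesHandler()
# spot a "size = ..." comment next to the member.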
Ths class serves as a proxy to a ctypes, adding + a few features: + - allows easy access to values defined via CParser + - automatic type conversions for function calls using CParser function signatures + - creates ctype classes based on type definitions from CParser + + Initialize using a ctypes shared object and a CParser: + headers = CParser.winDefs() + lib = CLibrary(windll.User32, headers) + + There are 3 ways to access library elements: + lib(type, name) - type can be one of 'values', 'functions', 'types', 'structs', 'unions', or 'enums'. + Returns an object matching name. For values, the value from the headers is + returned. For functions, a callable object is returned that handles automatic + type conversion for arguments and return values. for structs, types, and enums, + a ctypes class is returned matching the type specified. + lib.name - searches in order through values, functions, types, structs, unions, and enums from + header definitions and returns an object for the first match found. The object + returned is the same as returned by lib(type, name). This is the preferred way to access + elements from CLibrary, but may not work in some situations (for example, if + a struct and variable share the same name). + lib[type] - Accesses the header definitions directly, returns definition dictionaries + based on the type requested. This is equivalent to headers.defs[type]. + """ + Null = object() + + cTypes = { + 'char': c_char, + 'wchar': c_wchar, + 'unsigned char': c_ubyte, + 'short': c_short, + 'short int': c_short, + 'unsigned short': c_ushort, + 'unsigned short int': c_ushort, + 'int': c_int, + 'unsigned': c_uint, + 'unsigned int': c_uint, + 'long': c_long, + 'long int': c_long, + 'unsigned long': c_ulong, + 'unsigned long int': c_ulong, + '__int64': c_longlong, + 'long long': c_longlong, + 'long long int': c_longlong, + 'unsigned __int64': c_ulonglong, + 'unsigned long long': c_ulonglong, + 'unsigned long long int': c_ulonglong, + 'float': c_float, + 'double': c_double, + 'long double': c_longdouble + } + cPtrTypes = { + 'char': c_char_p, + 'wchar': c_wchar_p, + 'void': c_void_p + } + + + + def __init__(self, lib, headers, prefix=None): + ## name everything using underscores to avoid name collisions with library + + self._lib_ = lib + self._headers_ = headers + self._defs_ = headers.defs + if prefix is None: + self._prefix_ = [] + elif type(prefix) is list: + self._prefix_ = prefix + else: + self._prefix_ = [prefix] + self._objs_ = {} + for k in ['values', 'functions', 'types', 'structs', 'unions', 'enums']: + self._objs_[k] = {} + self._allObjs_ = {} + self._structs_ = {} + self._unions_ = {} + + def __call__(self, typ, name): + if typ not in self._objs_: + typs = self._objs_.keys() + raise Exception("Type must be one of %s" % str(typs)) + + if name not in self._objs_[typ]: + self._objs_[typ][name] = self._mkObj_(typ, name) + + return self._objs_[typ][name] + + def _allNames_(self, name): + return [name] + [p + name for p in self._prefix_] + + def _mkObj_(self, typ, name): + names = self._allNames_(name) + + for n in names: + if n in self._objs_: + return self._objs_[n] + + for n in names: ## try with and without prefix + if n not in self._defs_[typ] and not (typ in ['structs', 'unions', 'enums'] and n in self._defs_['types']): + continue + + if typ == 'values': + return self._defs_[typ][n] + elif typ == 'functions': + return self._getFunction(n) + elif typ == 'types': + obj = self._defs_[typ][n] + return self._ctype(obj) + elif typ == 'structs': + return 
self._cstruct('structs', n) + elif typ == 'unions': + return self._cstruct('unions', n) + elif typ == 'enums': + ## Allow automatic resolving of typedefs that alias enums + if n not in self._defs_['enums']: + if n not in self._defs_['types']: + raise Exception('No enums named "%s"' % n) + typ = self._headers_.evalType([n])[0] + if typ[:5] != 'enum ': + raise Exception('No enums named "%s"' % n) + n = self._defs_['types'][typ][1] ## look up internal name of enum + obj = self._defs_['enums'][n] + + return obj + else: + raise Exception("Unknown type %s" % typ) + raise NameError(name) + + + def __getattr__(self, name): + """Used to retrieve any type of definition from the headers. Searches for the name in this order: + values, functions, types, structs, unions, enums.""" + if name not in self._allObjs_: + names = self._allNames_(name) + for k in ['values', 'functions', 'types', 'structs', 'unions', 'enums', None]: + if k is None: + raise NameError(name) + obj = None + for n in names: + if n in self._defs_[k]: + obj = self(k, n) + break + if obj is not None: + break + self._allObjs_[name] = obj + return self._allObjs_[name] + + def __getitem__(self, name): + """Used to retrieve a specific dictionary from the headers.""" + return self._defs_[name] + + def __repr__(self): + return "" % str(self._lib_) + + def _getFunction(self, funcName): + try: + func = getattr(self._lib_, funcName) + except: + raise Exception("Function name '%s' appears in headers but not in library!" % func) + + #print "create function %s," % (funcName), self._defs_['functions'][funcName] + return CFunction(self, func, self._defs_['functions'][funcName], funcName) + + def _ctype(self, typ, pointers=True): + """return a ctype object representing the named type. + If pointers is True, the class returned includes all pointer/array specs provided. 
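# A minimal ctypes sketch of the modifier handling implemented here: for a
# hypothetical parsed descriptor ['int', '*', [4]] (the C declaration
# "int *x[4]"), pointers are applied before array dimensions, so _ctype()
# effectively builds:
from ctypes import POINTER, c_int

cls = c_int
cls = POINTER(cls)   # the '*' modifier
cls = cls * 4        # the [4] modifier
print(cls)           # an array type holding four pointer-to-int
# (['char', '*'], by contrast, maps straight to c_char_p via cPtrTypes above.)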
+ Otherwise, the class returned is just the base type with no pointers.""" + try: + typ = self._headers_.evalType(typ) + mods = typ[1:][:] + + ## Create the initial type + ## Some types like ['char', '*'] have a specific ctype (c_char_p) + ## (but only do this if pointers == True) + if pointers and len(typ) > 1 and typ[1] == '*' and typ[0] in CLibrary.cPtrTypes: + cls = CLibrary.cPtrTypes[typ[0]] + mods = typ[2:] + + ## If the base type is in the list of existing ctypes: + elif typ[0] in CLibrary.cTypes: + cls = CLibrary.cTypes[typ[0]] + + ## structs, unions, enums: + elif typ[0][:7] == 'struct ': + cls = self._cstruct('structs', self._defs_['types'][typ[0]][1]) + elif typ[0][:6] == 'union ': + cls = self._cstruct('unions', self._defs_['types'][typ[0]][1]) + elif typ[0][:5] == 'enum ': + cls = c_int + + ## void + elif typ[0] == 'void': + cls = None + else: + #print typ + raise Exception("Can't find base type for %s" % str(typ)) + + if not pointers: + return cls + + ## apply pointers and arrays + while len(mods) > 0: + m = mods.pop(0) + if isinstance(m, basestring): ## pointer or reference + if m[0] == '*' or m[0] == '&': + for i in m: + cls = POINTER(cls) + elif type(m) is list: ## array + for i in m: + if i == -1: ## -1 indicates an 'incomplete type' like "int variable[]" + cls = POINTER(cls) ## which we should interpret like "int *variable" + else: + cls = cls * i + elif type(m) is tuple: ## Probably a function pointer + ## Find pointer and calling convention + isPtr = False + conv = '__cdecl' + if len(mods) == 0: + raise Exception("Function signature with no pointer:", m, mods) + for i in [0,1]: + if len(mods) < 1: + break + if mods[0] == '*': + mods.pop(0) + isPtr = True + elif mods[0] in ['__stdcall', '__cdecl']: + conv = mods.pop(0) + else: + break + if not isPtr: + raise Exception("Not sure how to handle type (function without single pointer): %s" % str(typ)) + + if conv == '__stdcall': + mkfn = WINFUNCTYPE + else: + mkfn = CFUNCTYPE + #print "Create function pointer (%s)" % conv + + args = [self._ctype(arg[1]) for arg in m] + cls = mkfn(cls, *args) + + else: + raise Exception("Not sure what to do with this type modifier: '%s'" % str(p)) + return cls + except: + print "Error while processing type", typ + raise + + def _cstruct(self, strType, strName): + if strName not in self._structs_: + + ## Resolve struct name--typedef aliases allowed. + if strName not in self._defs_[strType]: + if strName not in self._defs_['types']: + raise Exception('No struct/union named "%s"' % strName) + typ = self._headers_.evalType([strName])[0] + if typ[:7] != 'struct ' and typ[:6] != 'union ': + raise Exception('No struct/union named "%s"' % strName) + strName = self._defs_['types'][typ][1] + + ## Pull struct definition + defn = self._defs_[strType][strName] + + + ## create ctypes class + defs = defn['members'][:] + if strType == 'structs': + class s(Structure): + def __repr__(self): + return "" % strName + elif strType == 'unions': + class s(Union): + def __repr__(self): + return "" % strName + + + ## must register struct here to allow recursive definitions. 
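# The class registered just below is an ordinary ctypes Structure/Union.  A
# hand-written equivalent of what _cstruct() produces for a hypothetical
# parsed "struct point { int x; int y; };" would be:
from ctypes import Structure, c_int

class point(Structure):
    _fields_ = [('x', c_int), ('y', c_int)]   # derived from defn['members']

p = point(3, 4)
print(p.x + p.y)   # 7
# _cstruct() additionally records _pack_, anonymous members and per-field
# defaults (_defaults_) from the parsed definition.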
+ self._structs_[strName] = s + + if defn['pack'] is not None: + s._pack_ = defn['pack'] + + ## assign names to anonymous members + members = [] + anon = [] + for i in range(len(defs)): + if defs[i][0] is None: + c = 0 + while True: + name = 'anon_member%d' % c + if name not in members: + defs[i][0] = name + anon.append(name) + break + members.append(defs[i][0]) + + s._anonymous_ = anon + s._fields_ = [(m[0], self._ctype(m[1])) for m in defs] + s._defaults_ = [m[2] for m in defs] + return self._structs_[strName] + + + +class CFunction: + def __init__(self, lib, func, sig, name): + self.lib = lib + self.func = func + #print sig + self.sig = list(sig) # looks like [return_type, [(argName, type, default), (argName, type, default), ...]] + self.sig[1] = [s for s in sig[1] if s[1] != ['void']] ## remove void args from list + for conv in ['__stdcall', '__cdecl']: + if conv in self.sig[0]: + self.sig[0].remove(conv) + self.name = name + self.restype = lib._ctype(self.sig[0]) + #func.restype = self.restype + self.argTypes = [lib._ctype(s[1]) for s in self.sig[1]] + func.argtypes = self.argTypes + self.reqArgs = [x[0] for x in self.sig[1] if x[2] is None] + self.argInds = dict([(self.sig[1][i][0], i) for i in range(len(self.sig[1]))]) ## mapping from argument names to indices + #print "created func", self, sig, self.argTypes + + def argCType(self, arg): + """Return the ctype required for the specified argument. + arg can be either an integer or the name of the argument. + """ + if isinstance(arg, basestring): + arg = self.argInds[arg] + return self.lib._ctype(self.sig[1][arg][1]) + + def __call__(self, *args, **kwargs): + """Invoke the SO or dll function referenced, converting all arguments to the correct type. + Keyword arguments are allowed as long as the header specifies the argument names. + Arguments which are passed byref may be omitted entirely, and will be automaticaly generated. + To pass a NULL pointer, give None as the argument. + Returns the return value of the function call as well as all of the arguments (so that objects passed by reference can be retrieved)""" + #print "CALL: %s(%s)" % (self.name, ", ".join(map(str, args) + ["%s=%s" % (k, str(kwargs[k])) for k in kwargs])) + #print " sig:", self.sig + argList = [None] * max(len(self.reqArgs), len(args)) ## We'll need at least this many arguments. 
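# Hypothetical usage of the call path assembled in this method (library,
# header and function names are made up; see the CLibrary docstring above):
#
#   headers = CParser('foo.h')
#   lib = CLibrary(cdll.LoadLibrary('libfoo.so'), headers)
#   res = lib.foo_get_version(None)   # None is passed as a NULL pointer
#   print(res())                      # the converted return value
#   print(res['out_buf'])             # any argument, by name or by index
#
# Pointer arguments that the caller omits are auto-allocated further down
# ("guessedArgs") so their final values can be read back from the CallResult.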
+ + ## First fill in args + for i in range(len(args)): + #argList[i] = self.argTypes[i](args[i]) + if args[i] is None: + argList[i] = self.lib.Null + else: + argList[i] = args[i] + + ## Next fill in kwargs + for k in kwargs: + #print " kw:", k + if k not in self.argInds: + print "Function signature:", self.prettySignature() + raise Exception("Function signature has no argument named '%s'" % k) + ind = self.argInds[k] + if ind >= len(argList): ## stretch argument list if needed + argList += [None] * (ind - len(argList) + 1) + #argList[ind] = self.coerce(kwargs[k], self.argTypes[ind]) + if kwargs[k] is None: + argList[ind] = self.lib.Null + else: + argList[ind] = kwargs[k] + + guessedArgs = [] + ## Finally, fill in remaining arguments if they are pointers to int/float/void*/struct values + ## (we assume these are to be modified by the function and their initial value is not important) + for i in range(len(argList)): + if argList[i] is None or argList[i] is self.lib.Null: + try: + sig = self.sig[1][i][1] + argType = self.lib._headers_.evalType(sig) + if argList[i] is self.lib.Null: ## request to build a null pointer + if len(argType) < 2: + raise Exception("Can not create NULL for non-pointer argument type: %s" % str(argType)) + argList[i] = self.lib._ctype(sig)() + #elif argType == ['char', '*']: ## pass null pointer if none was specified. This is a little dangerous, but some functions will expect it. + #argList[i] = c_char_p() ## On second thought: let's just require the user to explicitly ask for a NULL pointer. + else: + if argType == ['void', '**'] or argType == ['void', '*', '*']: + cls = c_void_p + else: + assert len(argType) == 2 and argType[1] == '*' ## Must be 2-part type, second part must be '*' + cls = self.lib._ctype(sig, pointers=False) + argList[i] = pointer(cls(0)) + guessedArgs.append(i) + except: + if sys.exc_info()[0] is not AssertionError: + raise + #sys.excepthook(*sys.exc_info()) + print "Function signature:", self.prettySignature() + raise Exception("Function call '%s' missing required argument %d '%s'. (See above for signature)" % (self.name, i, self.sig[1][i][0])) + #print " args:", argList + try: + res = self.func(*argList) + except: + print "Function call failed. Signature is:", self.prettySignature() + print "Arguments:", argList + print "Argtypes:", self.func.argtypes + raise + #print " result:", res + + cr = CallResult(res, argList, self.sig, guessed=guessedArgs) + return cr + + def prettySignature(self): + return "%s %s(%s)" % (''.join(self.sig[0]), self.name, ', '.join(["%s %s" % ("".join(map(str, s[1])), s[0]) for s in self.sig[1]])) + +class CallResult: + """Class for bundling results from C function calls. Allows access to the function + return value as well as all of the arguments, since the function call will often return + extra values via these arguments. + - Original ctype objects can be accessed via result.rval or result.args + - Python values carried by these objects can be accessed using () + To access values: + - The return value: () + - The nth argument passed: [n] + - The argument by name: ['name'] + - All values that were auto-generated: .auto() + + The class can also be used as an iterator, so that tuple unpacking is possible: + ret, arg1, arg2 = lib.runSomeFunction(...) 
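# A concrete ctypes illustration of what mkVal() below does with such a
# by-reference argument: it follows .contents until it reaches an object
# exposing .value (the int here is hypothetical):
from ctypes import c_int, pointer

arg = pointer(c_int(7))      # shape of an auto-generated output argument
print(arg.contents.value)    # 7 -- the Python value handed back by result[n]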
+ """ + def __init__(self, rval, args, sig, guessed): + self.rval = rval ## return value of function call + self.args = args ## list of arguments to function call + self.sig = sig ## function signature + self.guessed = guessed ## list of arguments that were generated automatically (usually byrefs) + + def __call__(self): + #print "Clibrary:", type(self.rval), self.mkVal(self.rval) + if self.sig[0] == ['void']: + return None + return self.mkVal(self.rval) + + def __getitem__(self, n): + if type(n) is int: + return self.mkVal(self.args[n]) + elif type(n) is str: + ind = self.findArg(n) + return self.mkVal(self.args[ind]) + else: + raise Exception("Index must be int or str.") + + def __setitem__(self, n, val): + if type(n) is int: + self.args[n] = val + elif type(n) is str: + ind = self.findArg(n) + self.args[ind] = val + else: + raise Exception("Index must be int or str.") + + + def mkVal(self, obj): + while not hasattr(obj, 'value'): + if not hasattr(obj, 'contents'): + return obj + try: + obj = obj.contents + except ValueError: + return None + + return obj.value + + + def findArg(self, arg): + for i in range(len(self.sig[1])): + if self.sig[1][i][0] == arg: + return i + raise Exception("Can't find argument '%s' in function signature. Arguments are: %s" % (arg, str([a[0] for a in self.sig[1]]))) + + def __iter__(self): + yield self() + for i in range(len(self.args)): + yield(self[i]) + + def auto(self): + return [self[n] for n in self.guessed] + + + + + diff --git a/scripts/module/pyclibrary/CParser.py b/scripts/module/pyclibrary/CParser.py new file mode 100644 index 000000000..df2b4dea6 --- /dev/null +++ b/scripts/module/pyclibrary/CParser.py @@ -0,0 +1,1274 @@ +# -*- coding: utf-8 -*- +""" +CParser.py - C parsing library +Copyright 2010 Luke Campagnola +Distributed under MIT/X11 license. See license.txt for more infomation. + +Used for extracting data such as macro definitions, variables, typedefs, and function +signatures from C files (preferrably header files). +""" + +import sys, re, os + +__all__ = ['winDefs', 'CParser'] + + +def winDefs(verbose=False): + """Convenience function. Returns a parser which loads a selection of windows headers included with + CParser. These definitions can either be accessed directly or included before parsing + another file like this: + windefs = CParser.winDefs() + p = CParser.CParser("headerFile.h", copyFrom=windefs) + Definitions are pulled from a selection of header files included in Visual Studio + (possibly not legal to distribute? Who knows.), some of which have been abridged + because they take so long to parse. + """ + headerFiles = ['WinNt.h', 'WinDef.h', 'WinBase.h', 'BaseTsd.h', 'WTypes.h', 'WinUser.h'] + d = os.path.dirname(__file__) + p = CParser( + [os.path.join(d, 'headers', h) for h in headerFiles], + types={'__int64': ('long long')}, + macros={'_WIN32': '', '_MSC_VER': '800', 'CONST': 'const', 'NO_STRICT': None}, + processAll=False + ) + p.processAll(cache=os.path.join(d, 'headers', 'WinDefs.cache'), noCacheWarning=True, verbose=verbose) + return p + + +class CParser(): + """Class for parsing C code to extract variable, struct, enum, and function declarations as well as preprocessor macros. This is not a complete C parser; instead, it is meant to simplify the process + of extracting definitions from header files in the absence of a complete build system. 
Many files + will require some amount of manual intervention to parse properly (see 'replace' and extra arguments + to __init__) + + Usage: + ## create parser object, load two files + p = CParser(['header1.h', 'header2.h']) + + ## remove comments, preprocess, and search for declarations + p.processAll() + + ## just to see what was successfully parsed from the files + p.printAll() + + ## access parsed declarations + allValues = p.defs['values'] + functionSignatures = p.defs['functions'] + ... + + ## To see what was not successfully parsed: + unp = p.processAll(returnUnparsed=True) + for s in unp: + print s + """ + + cacheVersion = 22 ## increment every time cache structure or parsing changes to invalidate old cache files. + + def __init__(self, files=None, replace=None, copyFrom=None, processAll=True, cache=None, verbose=False, **args): + """Create a C parser object fiven a file or list of files. Files are read to memory and operated + on from there. + 'copyFrom' may be another CParser object from which definitions should be copied. + 'replace' may be specified to perform string replacements before parsing. + format is {'searchStr': 'replaceStr', ...} + Extra parameters may be used to specify the starting state of the parser. For example, + one could provide a set of missing type declarations by + types={'UINT': ('unsigned int'), 'STRING': ('char', 1)} + Similarly, preprocessor macros can be specified: + macros={'WINAPI': ''} + """ + + + self.defs = {} ## holds all definitions + self.fileDefs = {} ## holds definitions grouped by the file they came from + + self.initOpts = args.copy() + self.initOpts['files'] = [] + self.initOpts['replace'] = {} + + self.dataList = ['types', 'variables', 'fnmacros', 'macros', 'structs', 'unions', 'enums', 'functions', 'values'] + + self.verbose = False + + # placeholders for definitions that change during parsing + #if hasPyParsing: + #self.macroExpr = Forward() + #self.fnMacroExpr = Forward() + #self.definedType = Forward() + #self.definedStruct = Forward() + #self.definedEnum = Forward() + + self.fileOrder = [] + self.files = {} + self.packList = {} ## list describing struct packing rules as defined by #pragma pack + if files is not None: + if type(files) is str: + files = [files] + for f in files: + self.loadFile(f, replace) + + ## initialize empty definition lists + for k in self.dataList: + self.defs[k] = {} + #for f in files: + #self.fileDefs[f][k] = {} + + self.compiledTypes = {} ## holds translations from typedefs/structs/unions to fundamental types + + self.currentFile = None + + # Import extra arguments if specified + for t in args: + for k in args[t].keys(): + self.addDef(t, k, args[t][k]) + + # Import from other CParsers if specified + if copyFrom is not None: + if type(copyFrom) not in [list, tuple]: + copyFrom = [copyFrom] + for p in copyFrom: + self.importDict(p.fileDefs) + + if processAll: + self.processAll(cache=cache, verbose=verbose) + + def processAll(self, cache=None, returnUnparsed=False, printAfterPreprocess=False, noCacheWarning=True, verbose=False): + """Remove comments, preprocess, and parse declarations from all files. (operates in memory; does not alter the original files) + Returns a list of the results from parseDefs. + 'cache' may specify a file where cached results are be stored or retrieved. The cache + is automatically invalidated if any of the arguments to __init__ are changed, or if the + C files are newer than the cache. + 'returnUnparsed' is passed directly to parseDefs. 
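# A minimal usage sketch of the options documented here (the header and cache
# file names are hypothetical; the types/macros values mirror the examples in
# the __init__ docstring):
#
#   p = CParser(['radio.h'],
#               replace={'DEPRECATED': ''},         # regex substitutions applied on load
#               types={'UINT': ('unsigned int')},   # pre-seeded type definitions
#               macros={'WINAPI': ''},              # pre-seeded macros
#               processAll=False)
#   p.processAll(cache='radio.h.cache', verbose=True)   # pickled cache, reused next run
#   print(p.defs['functions'].keys())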
+ 'printAfterPreprocess' is for debugging; prints the result of preprocessing each file.""" + self.verbose = verbose + if cache is not None and self.loadCache(cache, checkValidity=True): + if verbose: + print "Loaded cached definitions; will skip parsing." + return ## cached values loaded successfully, nothing left to do here + #else: + #print "No cache.", cache + + + results = [] + if noCacheWarning or verbose: + print "Parsing C header files (no valid cache found). This could take several minutes..." + for f in self.fileOrder: + #fn = os.path.basename(f) + if self.files[f] is None: + ## This means the file could not be loaded and there was no cache. + raise Exception('Could not find header file "%s" or a suitable cache file.' % f) + if verbose: + print "Removing comments from file '%s'..." % f + self.removeComments(f) + if verbose: + print "Preprocessing file '%s'..." % f + self.preprocess(f) + if printAfterPreprocess: + print "===== PREPROCSSED %s =======" % f + print self.files[f] + if verbose: + print "Parsing definitions in file '%s'..." % f + results.append(self.parseDefs(f, returnUnparsed)) + + if cache is not None: + if verbose: + print "Writing cache file '%s'" % cache + self.writeCache(cache) + + return results + + + def loadCache(self, cacheFile, checkValidity=False): + """Load a cache file. Used internally if cache is specified in processAll(). + if checkValidity=True, then run several checks before loading the cache: + - cache file must not be older than any source files + - cache file must not be older than this library file + - options recorded in cache must match options used to initialize CParser""" + + ## make sure cache file exists + if type(cacheFile) is not str: + raise Exception("cache file option must be a string.") + if not os.path.isfile(cacheFile): + d = os.path.dirname(__file__) ## If file doesn't exist, search for it in this module's path + cacheFile = os.path.join(d, "headers", cacheFile) + if not os.path.isfile(cacheFile): + if self.verbose: + print "Can't find requested cache file." + return False + + ## make sure cache is newer than all input files + if checkValidity: + mtime = os.stat(cacheFile).st_mtime + for f in self.fileOrder: + ## if file does not exist, then it does not count against the validity of the cache. + if os.path.isfile(f) and os.stat(f).st_mtime > mtime: + if self.verbose: + print "Cache file is out of date." + return False + + try: + ## read cache file + import pickle + cache = pickle.load(open(cacheFile, 'rb')) + + ## make sure __init__ options match + if checkValidity: + if cache['opts'] != self.initOpts: + if self.verbose: + print "Cache file is not valid--created using different initialization options." + print cache['opts'] + print self.initOpts + return False + elif self.verbose: + print "Cache init opts are OK:" + print cache['opts'] + if cache['version'] < self.cacheVersion: + if self.verbose: + print "Cache file is not valid--cache format has changed." + return False + + ## import all parse results + self.importDict(cache['fileDefs']) + return True + except: + print "Warning--cache read failed:" + sys.excepthook(*sys.exc_info()) + return False + + def importDict(self, data): + """Import definitions from a dictionary. The dict format should be the + same as CParser.fileDefs. 
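# The cache consumed here via importDict() is a plain pickle with three keys.
# A minimal sketch of inspecting one by hand (the file name is assumed; see
# writeCache() below):
import pickle

cache = pickle.load(open('WinDefs.cache', 'rb'))
print(cache['version'])      # compared against CParser.cacheVersion
print(cache['opts'])         # the __init__ options recorded for validation
defs = cache['fileDefs']     # per-file definition dicts fed to importDict()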
Used internally; does not need to be called + manually.""" + for f in data.keys(): + self.currentFile = f + for k in self.dataList: + for n in data[f][k]: + self.addDef(k, n, data[f][k][n]) + + def writeCache(self, cacheFile): + """Store all parsed declarations to cache. Used internally.""" + cache = {} + cache['opts'] = self.initOpts + cache['fileDefs'] = self.fileDefs + cache['version'] = self.cacheVersion + #for k in self.dataList: + #cache[k] = getattr(self, k) + import pickle + pickle.dump(cache, open(cacheFile, 'wb')) + + def loadFile(self, file, replace=None): + """Read a file, make replacements if requested. Called by __init__, should + not be called manually.""" + if not os.path.isfile(file): + ## Not a fatal error since we might be able to function properly if there is a cache file.. + #raise Exception("File %s not found" % file) + print "Warning: C header '%s' is missing; this may cause trouble." % file + self.files[file] = None + return False + + fd = open(file, 'rU') ## U causes all newline types to be converted to \n + self.files[file] = fd.read() + fd.close() + + if replace is not None: + for s in replace: + self.files[file] = re.sub(s, replace[s], self.files[file]) + self.fileOrder.append(file) + bn = os.path.basename(file) + self.initOpts['replace'][bn] = replace + self.initOpts['files'].append(bn) # only interested in the file names; the directory may change between systems. + return True + + + + + + #### Beginning of processing functions + + def assertPyparsing(self): + """Make sure pyparsing module is available.""" + global hasPyParsing + if not hasPyParsing: + raise Exception("CParser class requires 'pyparsing' library for actual parsing work. Without this library, CParser can only be used with previously cached parse results.") + + + def removeComments(self, file): + """Remove all comments from file. (operates in memory; does not alter the original files)""" + self.assertPyparsing() + text = self.files[file] + cplusplusLineComment = Literal("//") + restOfLine + # match quoted strings first to prevent matching comments inside quotes + self.files[file] = (quotedString | cStyleComment.suppress() | cplusplusLineComment.suppress()).transformString(text) + + + def preprocess(self, file): + """Scan named file for preprocessor directives, removing them while expanding macros. 
(operates in memory; does not alter the original files)""" + self.assertPyparsing() + self.buildParser() ## we need this so that evalExpr works properly + self.currentFile = file + packStack = [(None,None)] ## stack for #pragma pack push/pop + self.packList[file] = [(0,None)] + packing = None ## current packing value + + text = self.files[file] + + ## First join together lines split by \\n + text = Literal('\\\n').suppress().transformString(text) + + #self.ppDirective = Combine("#" + Word(alphas).leaveWhitespace()) + restOfLine + + # define the structure of a macro definition + name = Word(alphas+'_', alphanums+'_')('name') + self.ppDefine = name.setWhitespaceChars(' \t')("macro") + Optional(lparen + delimitedList(name) + rparen).setWhitespaceChars(' \t')('args') + SkipTo(LineEnd())('value') + self.ppDefine.setParseAction(self.processMacroDefn) + + #self.updateMacroDefns() + #self.updateFnMacroDefns() + + # define pattern for scanning through the input string + #self.macroExpander = (self.macroExpr | self.fnMacroExpr) + + ## Comb through lines, process all directives + lines = text.split('\n') + + result = [] + #macroExpander = (quotedString | self.macroExpander) + directive = re.compile(r'\s*#([a-zA-Z]+)(.*)$') + ifTrue = [True] + ifHit = [] + for i in range(len(lines)): + line = lines[i] + newLine = '' + m = directive.match(line) + if m is None: # regular code line + if ifTrue[-1]: # only include if we are inside the correct section of an IF block + #line = macroExpander.transformString(line) # expand all known macros + newLine = self.expandMacros(line) + else: # macro line + d = m.groups()[0] + rest = m.groups()[1] + + #print "PREPROCESS:", d, rest + if d == 'ifdef': + d = 'if' + rest = 'defined '+rest + elif d == 'ifndef': + d = 'if' + rest = '!defined '+rest + + ## Evaluate 'defined' operator before expanding macros + if d in ['if', 'elif']: + def pa(t): + return ['0', '1'][t['name'] in self.defs['macros'] or t['name'] in self.defs['fnmacros']] + rest = ( + Keyword('defined') + + (name | lparen + name + rparen) + ).setParseAction(pa).transformString(rest) + elif d in ['define', 'undef']: + macroName, rest = re.match(r'\s*([a-zA-Z_][a-zA-Z0-9_]*)(.*)$', rest).groups() + + ## Expand macros if needed + if rest is not None and (all(ifTrue) or d in ['if', 'elif']): + rest = self.expandMacros(rest) + + if d == 'elif': + if ifHit[-1] or not all(ifTrue[:-1]): + ev = False + else: + ev = self.evalPreprocessorExpr(rest) + if self.verbose: + print " "*(len(ifTrue)-2) + line, rest, ev + ifTrue[-1] = ev + ifHit[-1] = ifHit[-1] or ev + elif d == 'else': + if self.verbose: + print " "*(len(ifTrue)-2) + line, not ifHit[-1] + ifTrue[-1] = (not ifHit[-1]) and all(ifTrue[:-1]) + ifHit[-1] = True + elif d == 'endif': + ifTrue.pop() + ifHit.pop() + if self.verbose: + print " "*(len(ifTrue)-1) + line + elif d == 'if': + if all(ifTrue): + ev = self.evalPreprocessorExpr(rest) + else: + ev = False + if self.verbose: + print " "*(len(ifTrue)-1) + line, rest, ev + ifTrue.append(ev) + ifHit.append(ev) + elif d == 'define': + if not ifTrue[-1]: + continue + if self.verbose: + print " "*(len(ifTrue)) + "define:", macroName, rest + try: + self.ppDefine.parseString(macroName+ ' ' + rest) ## macro is registered here + except: + print "Error processing macro definition:", macroName, rest + print " ", sys.exc_info()[1] + elif d == 'undef': + if not ifTrue[-1]: + continue + try: + self.remDef('macros', macroName.strip()) + #self.macroListString = '|'.join(self.defs['macros'].keys() + self.defs['fnmacros'].keys()) + 
#self.updateMacroDefns() + except: + if sys.exc_info()[0] is not KeyError: + sys.excepthook(*sys.exc_info()) + print "Error removing macro definition '%s'" % macroName.strip() + elif d == 'pragma': ## Check for changes in structure packing + if not ifTrue[-1]: + continue + m = re.match(r'\s+pack\s*\(([^\)]+)\)', rest) + if m is None: + continue + opts = [s.strip() for s in m.groups()[0].split(',')] + + pushpop = id = val = None + for o in opts: + if o in ['push', 'pop']: + pushpop = o + elif o.isdigit(): + val = int(o) + else: + id = o + + if val is not None: + packing = val + + if pushpop == 'push': + packStack.append((packing, id)) + elif opts[0] == 'pop': + if id is None: + packStack.pop() + else: + ind = None + for i in range(len(packStack)): + if packStack[i][1] == id: + ind = i + break + if ind is not None: + packStack = packStack[:ind] + if val is None: + packing = packStack[-1][0] + else: + packing = int(opts[0]) + + if self.verbose: + print ">> Packing changed to %s at line %d" % (str(packing), i) + self.packList[file].append((i, packing)) + else: + pass ## Ignore any other directives + + result.append(newLine) + self.files[file] = '\n'.join(result) + + def evalPreprocessorExpr(self, expr): + ## make a few alterations so the expression can be eval'd + macroDiffs = ( + Literal('!').setParseAction(lambda: ' not ') | + Literal('&&').setParseAction(lambda: ' and ') | + Literal('||').setParseAction(lambda: ' or ') | + Word(alphas+'_',alphanums+'_').setParseAction(lambda: '0')) + expr2 = macroDiffs.transformString(expr) + + try: + ev = bool(eval(expr2)) + except: + if self.verbose: + print "Error evaluating preprocessor expression: %s [%s]" % (expr, expr2) + print " ", sys.exc_info()[1] + ev = False + return ev + + + + #def updateMacroDefns(self): + ##self.macroExpr << MatchFirst( [Keyword(m)('macro') for m in self.defs['macros']] ) + ##self.macroExpr.setParseAction(self.processMacroRef) + + ## regex is faster than pyparsing. 
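# processMacroDefn()/compileFnMacro() below register two kinds of macros:
# object-like macros land in defs['macros'] (with their evaluated value in
# defs['values']), while function-like macros are compiled into a format
# string plus argument order.  A stand-alone re-creation of that compilation
# step for a hypothetical "#define MIN(a,b) ((a)<(b)?(a):(b))":
import re

text, args = '((a)<(b)?(a):(b))', ['a', 'b']
argRegex = re.compile(r'("(\\"|[^"])*")|(\b(%s)\b)' % '|'.join(args))
parts, order, start = [], [], 0
for m in argRegex.finditer(text):
    name = m.groups()[3]                         # the matched argument name
    if name is not None:
        parts.append(text[start:m.start(3)] + '%s')
        start = m.end(3)
        order.append(args.index(name))
parts.append(text[start:])
print((''.join(parts), order))   # ('((%s)<(%s)?(%s):(%s))', [0, 1, 0, 1])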
+ ## Matches quoted strings and macros + + ##names = self.defs['macros'].keys() + self.defs['fnmacros'].keys() + #if len(self.macroListString) == 0: + #self.macroRegex = None + #else: + #self.macroRegex = re.compile( + #r'("(\\"|[^"])*")|(\b(%s)\b)' % self.macroListString + #) + + #def updateFnMacroDefns(self): + #self.fnMacroExpr << MatchFirst( [(Keyword(m)('macro') + lparen + Group(delimitedList(expression))('args') + rparen) for m in self.defs['fnmacros']] ) + #self.fnMacroExpr.setParseAction(self.processFnMacroRef) + + + def processMacroDefn(self, t): + """Parse a #define macro and register the definition""" + if self.verbose: + print "MACRO:", t + #macroVal = self.macroExpander.transformString(t.value).strip() + #macroVal = Literal('\\\n').suppress().transformString(macroVal) ## remove escaped newlines + macroVal = t.value.strip() + if macroVal in self.defs['fnmacros']: + self.addDef('fnmacros', t.macro, self.defs['fnmacros'][macroVal]) + if self.verbose: + print " Copy fn macro %s => %s" % (macroVal, t.macro) + else: + if t.args == '': + val = self.evalExpr(macroVal) + self.addDef('macros', t.macro, macroVal) + self.addDef('values', t.macro, val) + if self.verbose: + print " Add macro:", t.macro, "("+str(val)+")", self.defs['macros'][t.macro] + else: + self.addDef('fnmacros', t.macro, self.compileFnMacro(macroVal, [x for x in t.args])) + if self.verbose: + print " Add fn macro:", t.macro, t.args, self.defs['fnmacros'][t.macro] + + #if self.macroListString == '': + #self.macroListString = t.macro + #else: + #self.macroListString += '|' + t.macro + #self.updateMacroDefns() + #self.macroExpr << MatchFirst( map(Keyword,self.defs['macros'].keys()) ) + return "#define " + t.macro + " " + macroVal + + + def compileFnMacro(self, text, args): + """Turn a function macro spec into a compiled description""" + ## find all instances of each arg in text + argRegex = re.compile(r'("(\\"|[^"])*")|(\b(%s)\b)' % ('|'.join(args))) + start = 0 + parts = [] + argOrder = [] + N = 3 + for m in argRegex.finditer(text): + arg = m.groups()[N] + #print m, arg + if arg is not None: + parts.append(text[start:m.start(N)] + '%s') + start = m.end(N) + argOrder.append(args.index(arg)) + parts.append(text[start:]) + return (''.join(parts), argOrder) + + + def expandMacros(self, line): + reg = re.compile(r'("(\\"|[^"])*")|(\b(\w+)\b)') + parts = [] + start = 0 + N = 3 ## the group number to check for macro names + macros = self.defs['macros'] + fnmacros = self.defs['fnmacros'] + for m in reg.finditer(line): + name = m.groups()[N] + if name in macros: + parts.append(line[start:m.start(N)]) + start = m.end(N) + parts.append(macros[name]) + elif name in fnmacros: + try: ## If function macro expansion fails, just ignore it. + exp, end = self.expandFnMacro(name, line[m.end(N):]) + parts.append(line[start:m.start(N)]) + start = end + m.end(N) + parts.append(exp) + except: + if sys.exc_info()[1][0] != 0: + print "Function macro expansion failed:", name, line[m.end(N):] + raise + parts.append(line[start:]) + return ''.join(parts) + + + + #def expandMacros(self, line): + #if self.macroRegex is None: + #return line + #parts = [] + #start = 0 + #N = 3 ## the group number to check for macro names + #for m in self.macroRegex.finditer(line): + #name = m.groups()[N] + #if name is not None: + #if name in self.defs['macros']: + #parts.append(line[start:m.start(N)]) + #start = m.end(N) + #parts.append(self.defs['macros'][name]) + #elif name in self.defs['fnmacros']: + #try: ## If function macro expansion fails, just ignore it. 
+ #exp, end = self.expandFnMacro(name, line[m.end(N):]) + #parts.append(line[start:m.start(N)]) + #start = end + m.end(N) + #parts.append(exp) + #except: + #if sys.exc_info()[1][0] != 0: + #print "Function macro expansion failed:", name, line[m.end(N):] + #raise + + #else: + #raise Exception("Macro '%s' not found (internal error)" % name) + #parts.append(line[start:]) + #return ''.join(parts) + + def expandFnMacro(self, name, text): + #print "expandMacro:", name, text + defn = self.defs['fnmacros'][name] + ## defn looks like ('%s + %s / %s', (0, 0, 1)) + + argList = stringStart + lparen + Group(delimitedList(expression))('args') + rparen + res = [x for x in argList.scanString(text, 1)] + if len(res) == 0: + raise Exception(0, "Function macro '%s' not followed by (...)" % name) + args, start, end = res[0] + #print " ", res + #print " ", args + #print " ", defn + newStr = defn[0] % tuple([args[0][i] for i in defn[1]]) + #print " ", newStr + return (newStr, end) + + + # parse action to replace macro references with their respective definition + #def processMacroRef(self, t): + #return self.defs['macros'][t.macro] + + #def processFnMacroRef(self, t): + #m = self.defs['fnmacros'][t.macro] + ##print "=====>>" + ##print "Process FN MACRO:", t + ##print " macro defn:", t.macro, m + ##print " macro call:", t.args + ### m looks like ('a + b', ('a', 'b')) + #newStr = m[0][:] + ##print " starting str:", newStr + #try: + #for i in range(len(m[1])): + ##print " step", i + #arg = m[1][i] + ##print " arg:", arg, '=>', t.args[i] + + #newStr = Keyword(arg).copy().setParseAction(lambda: t.args[i]).transformString(newStr) + ##print " new str:", newStr + #except: + ##sys.excepthook(*sys.exc_info()) + #raise + ##print "<<=====" + #return newStr + + + + + + + + + def parseDefs(self, file, returnUnparsed=False): + """Scan through the named file for variable, struct, enum, and function declarations. + Returns the entire tree of successfully parsed tokens. + If returnUnparsed is True, return a string of all lines that failed to match (for debugging).""" + self.assertPyparsing() + self.currentFile = file + #self.definedType << kwl(self.defs['types'].keys()) + + parser = self.buildParser() + if returnUnparsed: + text = parser.suppress().transformString(self.files[file]) + return re.sub(r'\n\s*\n', '\n', text) + else: + return [x[0] for x in parser.scanString(self.files[file])] + + def buildParser(self): + """Builds the entire tree of parser elements for the C language (the bits we support, anyway). + """ + + if hasattr(self, 'parser'): + return self.parser + + + self.assertPyparsing() + + + self.structType = Forward() + self.enumType = Forward() + self.typeSpec = (typeQualifier + ( + fundType | + Optional(kwl(sizeModifiers + signModifiers)) + ident | + self.structType | + self.enumType + ) + typeQualifier + msModifier).setParseAction(recombine) + #self.argList = Forward() + + ### Abstract declarators for use in function pointer arguments + # Thus begins the extremely hairy business of parsing C declarators. + # Whomever decided this was a reasonable syntax should probably never breed. + # The following parsers combined with the processDeclarator function + # allow us to turn a nest of type modifiers into a correctly + # ordered list of modifiers. + + self.declarator = Forward() + self.abstractDeclarator = Forward() + + ## abstract declarators look like: + # + # * + # **[num] + # (*)(int, int) + # *( )(int, int)[10] + # ...etc... 
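# The declarator grammar assembled below leans on pyparsing's Forward so an
# element can refer to itself (declarators nest, as in "(*fn)(int)").  A tiny
# self-contained sketch of that declare-then-attach pattern, using a made-up
# mini-grammar rather than the real typeSpec/declarator elements:
from pyparsing import Forward, Group, Literal, Word, ZeroOrMore, alphas, alphanums

ident = Word(alphas + '_', alphanums + '_')
decl = Forward()                      # declare first so it can be self-referential
decl << Group(Group(ZeroOrMore('*'))('ptrs')
              + (ident('name')
                 | Literal('(').suppress() + decl + Literal(')').suppress()))
print(decl.parseString('(**fn)'))     # nested pointers wrapped around a name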
+ self.abstractDeclarator << Group( + typeQualifier + Group(ZeroOrMore('*'))('ptrs') + typeQualifier + + ((Optional('&')('ref')) | (lparen + self.abstractDeclarator + rparen)('center')) + + Optional(lparen + Optional(delimitedList(Group( + self.typeSpec('type') + + self.abstractDeclarator('decl') + + Optional(Literal('=').suppress() + expression, default=None)('val') + )), default=None) + rparen)('args') + + Group(ZeroOrMore(lbrack + Optional(expression, default='-1') + rbrack))('arrays') + ) + + ## Argument list may consist of declarators or abstract declarators + #self.argList << delimitedList(Group( + #self.typeSpec('type') + + #(self.declarator('decl') | self.abstractDeclarator('decl')) + + #Optional(Keyword('=')) + expression + #)) + + ## declarators look like: + # varName + # *varName + # **varName[num] + # (*fnName)(int, int) + # * fnName(int arg1=0)[10] + # ...etc... + self.declarator << Group( + typeQualifier + callConv + Group(ZeroOrMore('*'))('ptrs') + typeQualifier + + ((Optional('&')('ref') + ident('name')) | (lparen + self.declarator + rparen)('center')) + + Optional(lparen + Optional(delimitedList(Group( + self.typeSpec('type') + + (self.declarator | self.abstractDeclarator)('decl') + + Optional(Literal('=').suppress() + expression, default=None)('val') + )), default=None) + rparen)('args') + + Group(ZeroOrMore(lbrack + Optional(expression, default='-1') + rbrack))('arrays') + ) + self.declaratorList = Group(delimitedList(self.declarator)) + + ## typedef + self.typeDecl = Keyword('typedef') + self.typeSpec('type') + self.declaratorList('declList') + semi + self.typeDecl.setParseAction(self.processTypedef) + + ## variable declaration + self.variableDecl = Group(self.typeSpec('type') + Optional(self.declaratorList('declList')) + Optional(Literal('=').suppress() + (expression('value') | (lbrace + Group(delimitedList(expression))('arrayValues') + rbrace)))) + semi + + self.variableDecl.setParseAction(self.processVariable) + + ## function definition + #self.paramDecl = Group(self.typeSpec + (self.declarator | self.abstractDeclarator)) + Optional(Literal('=').suppress() + expression('value')) + self.typelessFunctionDecl = self.declarator('decl') + nestedExpr('{', '}').suppress() + self.functionDecl = self.typeSpec('type') + self.declarator('decl') + nestedExpr('{', '}').suppress() + self.functionDecl.setParseAction(self.processFunction) + + + ## Struct definition + self.structDecl = Forward() + structKW = (Keyword('struct') | Keyword('union')) + #self.structType << structKW('structType') + ((Optional(ident)('name') + lbrace + Group(ZeroOrMore( Group(self.structDecl | self.variableDecl.copy().setParseAction(lambda: None)) ))('members') + rbrace) | ident('name')) + self.structMember = ( + Group(self.variableDecl.copy().setParseAction(lambda: None)) | + (self.typeSpec + self.declarator + nestedExpr('{', '}')).suppress() | + (self.declarator + nestedExpr('{', '}')).suppress() + ) + self.declList = lbrace + Group(OneOrMore(self.structMember))('members') + rbrace + self.structType << (Keyword('struct') | Keyword('union'))('structType') + ((Optional(ident)('name') + self.declList) | ident('name')) + + self.structType.setParseAction(self.processStruct) + #self.updateStructDefn() + + self.structDecl = self.structType + semi + + ## enum definition + enumVarDecl = Group(ident('name') + Optional(Literal('=').suppress() + (integer('value') | ident('valueName')))) + + self.enumType << Keyword('enum') + (Optional(ident)('name') + lbrace + Group(delimitedList(enumVarDecl))('members') + rbrace | 
ident('name')) + self.enumType.setParseAction(self.processEnum) + + self.enumDecl = self.enumType + semi + + + #self.parser = (self.typeDecl | self.variableDecl | self.structDecl | self.enumDecl | self.functionDecl) + self.parser = (self.typeDecl | self.variableDecl | self.functionDecl) + return self.parser + + def processDeclarator(self, decl): + """Process a declarator (without base type) and return a tuple (name, [modifiers]) + See processType(...) for more information.""" + toks = [] + name = None + #print "DECL:", decl + if 'callConv' in decl and len(decl['callConv']) > 0: + toks.append(decl['callConv']) + if 'ptrs' in decl and len(decl['ptrs']) > 0: + toks.append('*' * len(decl['ptrs'])) + if 'arrays' in decl and len(decl['arrays']) > 0: + #arrays = [] + #for x in decl['arrays']: + #n = self.evalExpr(x) + #if n == -1: ## If an array was given as '[]', interpret it as '*' instead. + #toks.append('*') + #else: + #arrays.append(n) + #if len(arrays) > 0: + #toks.append(arrays) + toks.append([self.evalExpr(x) for x in decl['arrays']]) + if 'args' in decl and len(decl['args']) > 0: + #print " process args" + if decl['args'][0] is None: + toks.append(()) + else: + toks.append(tuple([self.processType(a['type'], a['decl']) + (a['val'][0],) for a in decl['args']])) + if 'ref' in decl: + toks.append('&') + if 'center' in decl: + (n, t) = self.processDeclarator(decl['center'][0]) + if n is not None: + name = n + toks.extend(t) + if 'name' in decl: + name = decl['name'] + return (name, toks) + + def processType(self, typ, decl): + """Take a declarator + base type and return a serialized name/type description. + The description will be a list of elements (name, [basetype, modifier, modifier, ...]) + - name is the string name of the declarator or None for an abstract declarator + - basetype is the string representing the base type + - modifiers can be: + '*' - pointer (multiple pointers "***" allowed) + '&' - reference + '__X' - calling convention (windows only). X can be 'cdecl' or 'stdcall' + list - array. Value(s) indicate the length of each array, -1 for incomplete type. + tuple - function, items are the output of processType for each function argument. 
+ + Examples: + int *x[10] => ('x', ['int', [10], '*']) + char fn(int x) => ('fn', ['char', [('x', ['int'])]]) + struct s (*)(int, int*) => (None, ["struct s", ((None, ['int']), (None, ['int', '*'])), '*']) + """ + #print "PROCESS TYPE/DECL:", typ, decl + (name, decl) = self.processDeclarator(decl) + return (name, [typ] + decl) + + + + def processEnum(self, s, l, t): + try: + if self.verbose: + print "ENUM:", t + if t.name == '': + n = 0 + while True: + name = 'anonEnum%d' % n + if name not in self.defs['enums']: + break + n += 1 + else: + name = t.name[0] + + if self.verbose: + print " name:", name + + if name not in self.defs['enums']: + i = 0 + enum = {} + for v in t.members: + if v.value != '': + i = eval(v.value) + if v.valueName != '': + i = enum[v.valueName] + enum[v.name] = i + self.addDef('values', v.name, i) + i += 1 + if self.verbose: + print " members:", enum + self.addDef('enums', name, enum) + self.addDef('types', 'enum '+name, ('enum', name)) + return ('enum ' + name) + except: + if self.verbose: + print "Error processing enum:", t + sys.excepthook(*sys.exc_info()) + + + def processFunction(self, s, l, t): + if self.verbose: + print "FUNCTION", t, t.keys() + + try: + (name, decl) = self.processType(t.type, t.decl[0]) + if len(decl) == 0 or type(decl[-1]) != tuple: + print t + raise Exception("Incorrect declarator type for function definition.") + if self.verbose: + print " name:", name + print " sig:", decl + self.addDef('functions', name, (decl[:-1], decl[-1])) + + except: + if self.verbose: + print "Error processing function:", t + sys.excepthook(*sys.exc_info()) + + + def packingAt(self, line): + """Return the structure packing value at the given line number""" + packing = None + for p in self.packList[self.currentFile]: + if p[0] <= line: + packing = p[1] + else: + break + return packing + + def processStruct(self, s, l, t): + try: + strTyp = t.structType # struct or union + + ## check for extra packing rules + packing = self.packingAt(lineno(l, s)) + + if self.verbose: + print strTyp.upper(), t.name, t + if t.name == '': + n = 0 + while True: + sname = 'anon_%s%d' % (strTyp, n) + if sname not in self.defs[strTyp+'s']: + break + n += 1 + else: + if type(t.name) is str: + sname = t.name + else: + sname = t.name[0] + if self.verbose: + print " NAME:", sname + if len(t.members) > 0 or sname not in self.defs[strTyp+'s'] or self.defs[strTyp+'s'][sname] == {}: + if self.verbose: + print " NEW " + strTyp.upper() + struct = [] + for m in t.members: + typ = m[0].type + val = self.evalExpr(m) + if self.verbose: + print " member:", m, m[0].keys(), m[0].declList + if len(m[0].declList) == 0: ## anonymous member + struct.append((None, [typ], None)) + for d in m[0].declList: + (name, decl) = self.processType(typ, d) + struct.append((name, decl, val)) + if self.verbose: + print " ", name, decl, val + self.addDef(strTyp+'s', sname, {'pack': packing, 'members': struct}) + self.addDef('types', strTyp+' '+sname, (strTyp, sname)) + #self.updateStructDefn() + return strTyp+' '+sname + except: + #print t + sys.excepthook(*sys.exc_info()) + + def processVariable(self, s, l, t): + if self.verbose: + print "VARIABLE:", t + try: + val = self.evalExpr(t[0]) + for d in t[0].declList: + (name, typ) = self.processType(t[0].type, d) + if type(typ[-1]) is tuple: ## this is a function prototype + if self.verbose: + print " Add function prototype:", name, typ, val + self.addDef('functions', name, (typ[:-1], typ[-1])) + else: + if self.verbose: + print " Add variable:", name, typ, val + 
self.addDef('variables', name, (val, typ)) + self.addDef('values', name, val) + except: + #print t, t[0].name, t.value + sys.excepthook(*sys.exc_info()) + + def processTypedef(self, s, l, t): + if self.verbose: + print "TYPE:", t + typ = t.type + #print t, t.type + for d in t.declList: + (name, decl) = self.processType(typ, d) + if self.verbose: + print " ", name, decl + self.addDef('types', name, decl) + #self.definedType << MatchFirst( map(Keyword,self.defs['types'].keys()) ) + + def evalExpr(self, toks): + ## Evaluates expressions. Currently only works for expressions that also + ## happen to be valid python expressions. + ## This function does not currently include previous variable + ## declarations, but that should not be too difficult to implement.. + #print "Eval:", toks + try: + if isinstance(toks, basestring): + #print " as string" + val = self.eval(toks, None, self.defs['values']) + elif toks.arrayValues != '': + #print " as list:", toks.arrayValues + val = [self.eval(x, None, self.defs['values']) for x in toks.arrayValues] + elif toks.value != '': + #print " as value" + val = self.eval(toks.value, None, self.defs['values']) + else: + #print " as None" + val = None + return val + except: + if self.verbose: + print " failed eval:", toks + print " ", sys.exc_info()[1] + return None + + def eval(self, expr, *args): + """Just eval with a little extra robustness.""" + expr = expr.strip() + cast = (lparen + self.typeSpec + self.abstractDeclarator + rparen).suppress() + expr = (quotedString | number | cast).transformString(expr) + if expr == '': + return None + return eval(expr, *args) + + def printAll(self, file=None): + """Print everything parsed from files. Useful for debugging.""" + from pprint import pprint + for k in self.dataList: + print "============== %s ==================" % k + if file is None: + pprint(self.defs[k]) + else: + pprint(self.fileDefs[file][k]) + + def addDef(self, typ, name, val): + """Add a definition of a specific type to both the definition set for the current file and the global definition set.""" + self.defs[typ][name] = val + if self.currentFile is None: + baseName = None + else: + baseName = os.path.basename(self.currentFile) + if baseName not in self.fileDefs: + self.fileDefs[baseName] = {} + for k in self.dataList: + self.fileDefs[baseName][k] = {} + self.fileDefs[baseName][typ][name] = val + + def remDef(self, typ, name): + if self.currentFile is None: + baseName = None + else: + baseName = os.path.basename(self.currentFile) + del self.defs[typ][name] + del self.fileDefs[baseName][typ][name] + + + def isFundType(self, typ): + """Return True if this type is a fundamental C type, struct, or union""" + if typ[0][:7] == 'struct ' or typ[0][:6] == 'union ' or typ[0][:5] == 'enum ': + return True + + names = baseTypes + sizeModifiers + signModifiers + for w in typ[0].split(): + if w not in names: + return False + return True + + def evalType(self, typ): + """evaluate a named type into its fundamental type""" + used = [] + while True: + if self.isFundType(typ): + ## remove 'signed' before returning evaluated type + typ[0] = re.sub(r'\bsigned\b', '', typ[0]).strip() + + + return typ + parent = typ[0] + if parent in used: + raise Exception('Recursive loop while evaluating types. 
(typedefs are %s)' % (' -> '.join(used+[parent]))) + used.append(parent) + if not parent in self.defs['types']: + raise Exception('Unknown type "%s" (typedefs are %s)' % (parent, ' -> '.join(used))) + pt = self.defs['types'][parent] + typ = pt + typ[1:] + + def find(self, name): + """Search all definitions for the given name""" + res = [] + for f in self.fileDefs: + fd = self.fileDefs[f] + for t in fd: + typ = fd[t] + for k in typ: + if isinstance(name, basestring): + if k == name: + res.append((f, t)) + else: + if re.match(name, k): + res.append((f, t, k)) + return res + + + + def findText(self, text): + """Search all file strings for text, return matching lines.""" + res = [] + for f in self.files: + l = self.files[f].split('\n') + for i in range(len(l)): + if text in l[i]: + res.append((f, i, l[i])) + return res + + +hasPyParsing = False +try: + from pyparsing import * + ParserElement.enablePackrat() + hasPyParsing = True +except: + pass ## no need to do anything yet as we might not be using any parsing functions.. + + +## Define some common language elements if pyparsing is available. +if hasPyParsing: + ## Some basic definitions + expression = Forward() + pexpr = '(' + expression + ')' + numTypes = ['int', 'float', 'double', '__int64'] + baseTypes = ['char', 'bool', 'void'] + numTypes + sizeModifiers = ['short', 'long'] + signModifiers = ['signed', 'unsigned'] + qualifiers = ['const', 'static', 'volatile', 'inline', 'restrict', 'near', 'far'] + msModifiers = ['__based', '__declspec', '__fastcall', '__restrict', '__sptr', '__uptr', '__w64', '__unaligned', '__nullterminated'] + keywords = ['struct', 'enum', 'union', '__stdcall', '__cdecl'] + qualifiers + baseTypes + sizeModifiers + signModifiers + + def kwl(strs): + """Generate a match-first list of keywords given a list of strings.""" + #return MatchFirst(map(Keyword,strs)) + return Regex(r'\b(%s)\b' % '|'.join(strs)) + + keyword = kwl(keywords) + wordchars = alphanums+'_$' + ident = (WordStart(wordchars) + ~keyword + Word(alphas+"_",alphanums+"_$") + WordEnd(wordchars)).setParseAction(lambda t: t[0]) + #integer = Combine(Optional("-") + (Word( nums ) | Combine("0x" + Word(hexnums)))) + semi = Literal(";").ignore(quotedString).suppress() + lbrace = Literal("{").ignore(quotedString).suppress() + rbrace = Literal("}").ignore(quotedString).suppress() + lbrack = Literal("[").ignore(quotedString).suppress() + rbrack = Literal("]").ignore(quotedString).suppress() + lparen = Literal("(").ignore(quotedString).suppress() + rparen = Literal(")").ignore(quotedString).suppress() + hexint = Regex('-?0x[%s]+[UL]*'%hexnums).setParseAction(lambda t: t[0].rstrip('UL')) + decint = Regex(r'-?\d+[UL]*').setParseAction(lambda t: t[0].rstrip('UL')) + integer = (hexint | decint) + floating = Regex(r'-?((\d+(\.\d*)?)|(\.\d+))([eE]-?\d+)?') + number = (hexint | floating | decint) + bitfieldspec = ":" + integer + biOperator = oneOf("+ - / * | & || && ! ~ ^ % == != > < >= <= -> . :: << >> = ? :") + uniRightOperator = oneOf("++ --") + uniLeftOperator = oneOf("++ -- - + * sizeof new") + name = (WordStart(wordchars) + Word(alphas+"_",alphanums+"_$") + WordEnd(wordchars)) + #number = Word(hexnums + ".-+xUL").setParseAction(lambda t: t[0].rstrip('UL')) + #stars = Optional(Word('*&'), default='')('ptrs') ## may need to separate & from * later? + callConv = Optional(Keyword('__cdecl')|Keyword('__stdcall'))('callConv') + + ## Removes '__name' from all type specs.. may cause trouble. 
+ underscore2Ident = (WordStart(wordchars) + ~keyword + '__' + Word(alphanums,alphanums+"_$") + WordEnd(wordchars)).setParseAction(lambda t: t[0]) + typeQualifier = ZeroOrMore((underscore2Ident + Optional(nestedExpr())) | kwl(qualifiers)).suppress() + + msModifier = ZeroOrMore(kwl(msModifiers) + Optional(nestedExpr())).suppress() + pointerOperator = ( + '*' + typeQualifier | + '&' + typeQualifier | + '::' + ident + typeQualifier + ) + + + ## language elements + fundType = OneOrMore(kwl(signModifiers + sizeModifiers + baseTypes)).setParseAction(lambda t: ' '.join(t)) + + + + ## Is there a better way to process expressions with cast operators?? + castAtom = ( + ZeroOrMore(uniLeftOperator) + Optional('('+ident+')').suppress() + + (( + ident + '(' + Optional(delimitedList(expression)) + ')' | + ident + OneOrMore('[' + expression + ']') | + ident | number | quotedString + ) | + ('(' + expression + ')')) + + ZeroOrMore(uniRightOperator) + ) + uncastAtom = ( + ZeroOrMore(uniLeftOperator) + + (( + ident + '(' + Optional(delimitedList(expression)) + ')' | + ident + OneOrMore('[' + expression + ']') | + ident | number | quotedString + ) | + ('(' + expression + ')')) + + ZeroOrMore(uniRightOperator) + ) + atom = castAtom | uncastAtom + + expression << Group( + atom + ZeroOrMore(biOperator + atom) + ) + arrayOp = lbrack + expression + rbrack + + def recombine(tok): + """Flattens a tree of tokens and joins into one big string.""" + return " ".join(flatten(tok.asList())) + expression.setParseAction(recombine) + + def flatten(lst): + res = [] + for i in lst: + if type(i) in [list, tuple]: + res.extend(flatten(i)) + else: + res.append(str(i)) + return res + + def printParseResults(pr, depth=0, name=''): + """For debugging; pretty-prints parse result objects.""" + start = name + " "*(20-len(name)) + ':'+ '..'*depth + if isinstance(pr, ParseResults): + print start + for i in pr: + name = '' + for k in pr.keys(): + if pr[k] is i: + name = k + break + printParseResults(i, depth+1, name) + else: + print start + str(pr) + + + +## Just for fun.. +if __name__ == '__main__': + files = sys.argv[1:] + p = CParser(files) + p.processAll() + p.printAll() + \ No newline at end of file diff --git a/scripts/module/pyclibrary/README.md b/scripts/module/pyclibrary/README.md new file mode 100644 index 000000000..f1af9afb7 --- /dev/null +++ b/scripts/module/pyclibrary/README.md @@ -0,0 +1,8 @@ +pyclibrary +========== + +C parser and ctypes automation for Python. + +Fork of . (`bzr branch lp:pyclibrary pyclibrary-bzr && mkdir pyclibrary && cd pyclibrary && bar fast-export --plain ../pyclibrary-bzr | git fast-import`) + +Pyclibrary includes 1) a pure-python C parser and 2) a ctypes automation library that uses C header file definitions to simplify the use of ctypes. The C parser currently processes all macros, typedefs, structs, unions, enums, function prototypes, and global variable declarations, and can evaluate typedefs down to their fundamental C types + pointers/arrays/function signatures. Pyclibrary can automatically build ctypes structs/unions and perform type conversions when calling functions via cdll/windll. 
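Editor's note: the README above describes the intended workflow but does not show it; the following is a minimal usage sketch, assembled only from the `__main__` block and the `addDef` categories visible in CParser.py earlier in this patch. The header name `mylib.h` is a placeholder, not a file from this repository, and CLibrary (imported in `__init__.py` below) is not exercised here.

    # Hedged sketch: drive the pure-python C parser the same way the
    # __main__ block in CParser.py does. "mylib.h" is illustrative only.
    from CParser import CParser

    p = CParser(['mylib.h'])   # one or more C header files
    p.processAll()             # preprocess, replace macros, parse declarations
    p.printAll()               # dump everything that was collected

    # Parsed definitions end up in p.defs, keyed by category, e.g.
    # p.defs['types'], p.defs['enums'], p.defs['functions'], p.defs['variables'].
    # Per the processType docstring above, an entry such as "int *x[10]"
    # is stored in the serialized form ('x', ['int', [10], '*']).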
diff --git a/scripts/module/pyclibrary/__init__.py b/scripts/module/pyclibrary/__init__.py new file mode 100644 index 000000000..618aaa1a8 --- /dev/null +++ b/scripts/module/pyclibrary/__init__.py @@ -0,0 +1,3 @@ +# -*- coding: utf-8 -*- +from CParser import * +from CLibrary import * \ No newline at end of file diff --git a/scripts/module/pyclibrary/license.txt b/scripts/module/pyclibrary/license.txt new file mode 100644 index 000000000..3d04b87ea --- /dev/null +++ b/scripts/module/pyclibrary/license.txt @@ -0,0 +1,7 @@ +Copyright (c) 2010 Luke Campagnola ('luke.campagnola@%s.com' % 'gmail') + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. \ No newline at end of file diff --git a/scripts/module/pyclibrary/pyparsing.py b/scripts/module/pyclibrary/pyparsing.py new file mode 100644 index 000000000..dec506ed0 --- /dev/null +++ b/scripts/module/pyclibrary/pyparsing.py @@ -0,0 +1,3754 @@ +# module pyparsing.py +# +# Copyright (c) 2003-2011 Paul T. McGuire +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be +# included in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +# CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# +#from __future__ import generators + +__doc__ = \ +""" +pyparsing module - Classes and methods to define and execute parsing grammars + +The pyparsing module is an alternative approach to creating and executing simple grammars, +vs. the traditional lex/yacc approach, or the use of regular expressions. With pyparsing, you +don't need to learn a new syntax for defining grammars or matching expressions - the parsing module +provides a library of classes that you use to construct the grammar directly in Python. 
+ +Here is a program to parse "Hello, World!" (or any greeting of the form C{", !"}):: + + from pyparsing import Word, alphas + + # define grammar of a greeting + greet = Word( alphas ) + "," + Word( alphas ) + "!" + + hello = "Hello, World!" + print hello, "->", greet.parseString( hello ) + +The program outputs the following:: + + Hello, World! -> ['Hello', ',', 'World', '!'] + +The Python representation of the grammar is quite readable, owing to the self-explanatory +class names, and the use of '+', '|' and '^' operators. + +The parsed results returned from C{parseString()} can be accessed as a nested list, a dictionary, or an +object with named attributes. + +The pyparsing module handles some of the problems that are typically vexing when writing text parsers: + - extra or missing whitespace (the above program will also handle "Hello,World!", "Hello , World !", etc.) + - quoted strings + - embedded comments +""" + +__version__ = "1.5.6" +__versionTime__ = "1 May 2011 23:41" +__author__ = "Paul McGuire " + +import string +from weakref import ref as wkref +import copy +import sys +import warnings +import re +import sre_constants +#~ sys.stderr.write( "testing pyparsing module, version %s, %s\n" % (__version__,__versionTime__ ) ) + +__all__ = [ +'And', 'CaselessKeyword', 'CaselessLiteral', 'CharsNotIn', 'Combine', 'Dict', 'Each', 'Empty', +'FollowedBy', 'Forward', 'GoToColumn', 'Group', 'Keyword', 'LineEnd', 'LineStart', 'Literal', +'MatchFirst', 'NoMatch', 'NotAny', 'OneOrMore', 'OnlyOnce', 'Optional', 'Or', +'ParseBaseException', 'ParseElementEnhance', 'ParseException', 'ParseExpression', 'ParseFatalException', +'ParseResults', 'ParseSyntaxException', 'ParserElement', 'QuotedString', 'RecursiveGrammarException', +'Regex', 'SkipTo', 'StringEnd', 'StringStart', 'Suppress', 'Token', 'TokenConverter', 'Upcase', +'White', 'Word', 'WordEnd', 'WordStart', 'ZeroOrMore', +'alphanums', 'alphas', 'alphas8bit', 'anyCloseTag', 'anyOpenTag', 'cStyleComment', 'col', +'commaSeparatedList', 'commonHTMLEntity', 'countedArray', 'cppStyleComment', 'dblQuotedString', +'dblSlashComment', 'delimitedList', 'dictOf', 'downcaseTokens', 'empty', 'getTokensEndLoc', 'hexnums', +'htmlComment', 'javaStyleComment', 'keepOriginalText', 'line', 'lineEnd', 'lineStart', 'lineno', +'makeHTMLTags', 'makeXMLTags', 'matchOnlyAtCol', 'matchPreviousExpr', 'matchPreviousLiteral', +'nestedExpr', 'nullDebugAction', 'nums', 'oneOf', 'opAssoc', 'operatorPrecedence', 'printables', +'punc8bit', 'pythonStyleComment', 'quotedString', 'removeQuotes', 'replaceHTMLEntity', +'replaceWith', 'restOfLine', 'sglQuotedString', 'srange', 'stringEnd', +'stringStart', 'traceParseAction', 'unicodeString', 'upcaseTokens', 'withAttribute', +'indentedBlock', 'originalTextFor', +] + +""" +Detect if we are running version 3.X and make appropriate changes +Robert A. Clark +""" +_PY3K = sys.version_info[0] > 2 +if _PY3K: + _MAX_INT = sys.maxsize + basestring = str + unichr = chr + _ustr = str + alphas = string.ascii_lowercase + string.ascii_uppercase +else: + _MAX_INT = sys.maxint + range = xrange + set = lambda s : dict( [(c,0) for c in s] ) + alphas = string.lowercase + string.uppercase + + def _ustr(obj): + """Drop-in replacement for str(obj) that tries to be Unicode friendly. It first tries + str(obj). If that fails with a UnicodeEncodeError, then it tries unicode(obj). It + then < returns the unicode object | encodes it with the default encoding | ... >. 
+ """ + if isinstance(obj,unicode): + return obj + + try: + # If this works, then _ustr(obj) has the same behaviour as str(obj), so + # it won't break any existing code. + return str(obj) + + except UnicodeEncodeError: + # The Python docs (http://docs.python.org/ref/customization.html#l2h-182) + # state that "The return value must be a string object". However, does a + # unicode object (being a subclass of basestring) count as a "string + # object"? + # If so, then return a unicode object: + return unicode(obj) + # Else encode it... but how? There are many choices... :) + # Replace unprintables with escape codes? + #return unicode(obj).encode(sys.getdefaultencoding(), 'backslashreplace_errors') + # Replace unprintables with question marks? + #return unicode(obj).encode(sys.getdefaultencoding(), 'replace') + # ... + + alphas = string.lowercase + string.uppercase + +# build list of single arg builtins, tolerant of Python version, that can be used as parse actions +singleArgBuiltins = [] +import __builtin__ +for fname in "sum len enumerate sorted reversed list tuple set any all".split(): + try: + singleArgBuiltins.append(getattr(__builtin__,fname)) + except AttributeError: + continue + +def _xml_escape(data): + """Escape &, <, >, ", ', etc. in a string of data.""" + + # ampersand must be replaced first + from_symbols = '&><"\'' + to_symbols = ['&'+s+';' for s in "amp gt lt quot apos".split()] + for from_,to_ in zip(from_symbols, to_symbols): + data = data.replace(from_, to_) + return data + +class _Constants(object): + pass + +nums = string.digits +hexnums = nums + "ABCDEFabcdef" +alphanums = alphas + nums +_bslash = chr(92) +printables = "".join( [ c for c in string.printable if c not in string.whitespace ] ) + +class ParseBaseException(Exception): + """base exception class for all parsing runtime exceptions""" + # Performance tuning: we construct a *lot* of these, so keep this + # constructor as small and fast as possible + def __init__( self, pstr, loc=0, msg=None, elem=None ): + self.loc = loc + if msg is None: + self.msg = pstr + self.pstr = "" + else: + self.msg = msg + self.pstr = pstr + self.parserElement = elem + + def __getattr__( self, aname ): + """supported attributes by name are: + - lineno - returns the line number of the exception text + - col - returns the column number of the exception text + - line - returns the line containing the exception text + """ + if( aname == "lineno" ): + return lineno( self.loc, self.pstr ) + elif( aname in ("col", "column") ): + return col( self.loc, self.pstr ) + elif( aname == "line" ): + return line( self.loc, self.pstr ) + else: + raise AttributeError(aname) + + def __str__( self ): + return "%s (at char %d), (line:%d, col:%d)" % \ + ( self.msg, self.loc, self.lineno, self.column ) + def __repr__( self ): + return _ustr(self) + def markInputline( self, markerString = ">!<" ): + """Extracts the exception line from the input string, and marks + the location of the exception with a special symbol. 
+ """ + line_str = self.line + line_column = self.column - 1 + if markerString: + line_str = "".join( [line_str[:line_column], + markerString, line_str[line_column:]]) + return line_str.strip() + def __dir__(self): + return "loc msg pstr parserElement lineno col line " \ + "markInputLine __str__ __repr__".split() + +class ParseException(ParseBaseException): + """exception thrown when parse expressions don't match class; + supported attributes by name are: + - lineno - returns the line number of the exception text + - col - returns the column number of the exception text + - line - returns the line containing the exception text + """ + pass + +class ParseFatalException(ParseBaseException): + """user-throwable exception thrown when inconsistent parse content + is found; stops all parsing immediately""" + pass + +class ParseSyntaxException(ParseFatalException): + """just like C{ParseFatalException}, but thrown internally when an + C{ErrorStop} ('-' operator) indicates that parsing is to stop immediately because + an unbacktrackable syntax error has been found""" + def __init__(self, pe): + super(ParseSyntaxException, self).__init__( + pe.pstr, pe.loc, pe.msg, pe.parserElement) + +#~ class ReparseException(ParseBaseException): + #~ """Experimental class - parse actions can raise this exception to cause + #~ pyparsing to reparse the input string: + #~ - with a modified input string, and/or + #~ - with a modified start location + #~ Set the values of the ReparseException in the constructor, and raise the + #~ exception in a parse action to cause pyparsing to use the new string/location. + #~ Setting the values as None causes no change to be made. + #~ """ + #~ def __init_( self, newstring, restartLoc ): + #~ self.newParseText = newstring + #~ self.reparseLoc = restartLoc + +class RecursiveGrammarException(Exception): + """exception thrown by C{validate()} if the grammar could be improperly recursive""" + def __init__( self, parseElementList ): + self.parseElementTrace = parseElementList + + def __str__( self ): + return "RecursiveGrammarException: %s" % self.parseElementTrace + +class _ParseResultsWithOffset(object): + def __init__(self,p1,p2): + self.tup = (p1,p2) + def __getitem__(self,i): + return self.tup[i] + def __repr__(self): + return repr(self.tup) + def setOffset(self,i): + self.tup = (self.tup[0],i) + +class ParseResults(object): + """Structured parse results, to provide multiple means of access to the parsed data: + - as a list (C{len(results)}) + - by list index (C{results[0], results[1]}, etc.) 
+ - by attribute (C{results.}) + """ + #~ __slots__ = ( "__toklist", "__tokdict", "__doinit", "__name", "__parent", "__accumNames", "__weakref__" ) + def __new__(cls, toklist, name=None, asList=True, modal=True ): + if isinstance(toklist, cls): + return toklist + retobj = object.__new__(cls) + retobj.__doinit = True + return retobj + + # Performance tuning: we construct a *lot* of these, so keep this + # constructor as small and fast as possible + def __init__( self, toklist, name=None, asList=True, modal=True, isinstance=isinstance ): + if self.__doinit: + self.__doinit = False + self.__name = None + self.__parent = None + self.__accumNames = {} + if isinstance(toklist, list): + self.__toklist = toklist[:] + else: + self.__toklist = [toklist] + self.__tokdict = dict() + + if name is not None and name: + if not modal: + self.__accumNames[name] = 0 + if isinstance(name,int): + name = _ustr(name) # will always return a str, but use _ustr for consistency + self.__name = name + if not toklist in (None,'',[]): + if isinstance(toklist,basestring): + toklist = [ toklist ] + if asList: + if isinstance(toklist,ParseResults): + self[name] = _ParseResultsWithOffset(toklist.copy(),0) + else: + self[name] = _ParseResultsWithOffset(ParseResults(toklist[0]),0) + self[name].__name = name + else: + try: + self[name] = toklist[0] + except (KeyError,TypeError,IndexError): + self[name] = toklist + + def __getitem__( self, i ): + if isinstance( i, (int,slice) ): + return self.__toklist[i] + else: + if i not in self.__accumNames: + return self.__tokdict[i][-1][0] + else: + return ParseResults([ v[0] for v in self.__tokdict[i] ]) + + def __setitem__( self, k, v, isinstance=isinstance ): + if isinstance(v,_ParseResultsWithOffset): + self.__tokdict[k] = self.__tokdict.get(k,list()) + [v] + sub = v[0] + elif isinstance(k,int): + self.__toklist[k] = v + sub = v + else: + self.__tokdict[k] = self.__tokdict.get(k,list()) + [_ParseResultsWithOffset(v,0)] + sub = v + if isinstance(sub,ParseResults): + sub.__parent = wkref(self) + + def __delitem__( self, i ): + if isinstance(i,(int,slice)): + mylen = len( self.__toklist ) + del self.__toklist[i] + + # convert int to slice + if isinstance(i, int): + if i < 0: + i += mylen + i = slice(i, i+1) + # get removed indices + removed = list(range(*i.indices(mylen))) + removed.reverse() + # fixup indices in token dictionary + for name in self.__tokdict: + occurrences = self.__tokdict[name] + for j in removed: + for k, (value, position) in enumerate(occurrences): + occurrences[k] = _ParseResultsWithOffset(value, position - (position > j)) + else: + del self.__tokdict[i] + + def __contains__( self, k ): + return k in self.__tokdict + + def __len__( self ): return len( self.__toklist ) + def __bool__(self): return len( self.__toklist ) > 0 + __nonzero__ = __bool__ + def __iter__( self ): return iter( self.__toklist ) + def __reversed__( self ): return iter( self.__toklist[::-1] ) + def keys( self ): + """Returns all named result keys.""" + return self.__tokdict.keys() + + def pop( self, index=-1 ): + """Removes and returns item at specified index (default=last). 
+ Will work with either numeric indices or dict-key indicies.""" + ret = self[index] + del self[index] + return ret + + def get(self, key, defaultValue=None): + """Returns named result matching the given key, or if there is no + such name, then returns the given C{defaultValue} or C{None} if no + C{defaultValue} is specified.""" + if key in self: + return self[key] + else: + return defaultValue + + def insert( self, index, insStr ): + """Inserts new element at location index in the list of parsed tokens.""" + self.__toklist.insert(index, insStr) + # fixup indices in token dictionary + for name in self.__tokdict: + occurrences = self.__tokdict[name] + for k, (value, position) in enumerate(occurrences): + occurrences[k] = _ParseResultsWithOffset(value, position + (position > index)) + + def items( self ): + """Returns all named result keys and values as a list of tuples.""" + return [(k,self[k]) for k in self.__tokdict] + + def values( self ): + """Returns all named result values.""" + return [ v[-1][0] for v in self.__tokdict.values() ] + + def __getattr__( self, name ): + if True: #name not in self.__slots__: + if name in self.__tokdict: + if name not in self.__accumNames: + return self.__tokdict[name][-1][0] + else: + return ParseResults([ v[0] for v in self.__tokdict[name] ]) + else: + return "" + return None + + def __add__( self, other ): + ret = self.copy() + ret += other + return ret + + def __iadd__( self, other ): + if other.__tokdict: + offset = len(self.__toklist) + addoffset = ( lambda a: (a<0 and offset) or (a+offset) ) + otheritems = other.__tokdict.items() + otherdictitems = [(k, _ParseResultsWithOffset(v[0],addoffset(v[1])) ) + for (k,vlist) in otheritems for v in vlist] + for k,v in otherdictitems: + self[k] = v + if isinstance(v[0],ParseResults): + v[0].__parent = wkref(self) + + self.__toklist += other.__toklist + self.__accumNames.update( other.__accumNames ) + return self + + def __radd__(self, other): + if isinstance(other,int) and other == 0: + return self.copy() + + def __repr__( self ): + return "(%s, %s)" % ( repr( self.__toklist ), repr( self.__tokdict ) ) + + def __str__( self ): + out = "[" + sep = "" + for i in self.__toklist: + if isinstance(i, ParseResults): + out += sep + _ustr(i) + else: + out += sep + repr(i) + sep = ", " + out += "]" + return out + + def _asStringList( self, sep='' ): + out = [] + for item in self.__toklist: + if out and sep: + out.append(sep) + if isinstance( item, ParseResults ): + out += item._asStringList() + else: + out.append( _ustr(item) ) + return out + + def asList( self ): + """Returns the parse results as a nested list of matching tokens, all converted to strings.""" + out = [] + for res in self.__toklist: + if isinstance(res,ParseResults): + out.append( res.asList() ) + else: + out.append( res ) + return out + + def asDict( self ): + """Returns the named parse results as dictionary.""" + return dict( self.items() ) + + def copy( self ): + """Returns a new copy of a C{ParseResults} object.""" + ret = ParseResults( self.__toklist ) + ret.__tokdict = self.__tokdict.copy() + ret.__parent = self.__parent + ret.__accumNames.update( self.__accumNames ) + ret.__name = self.__name + return ret + + def asXML( self, doctag=None, namedItemsOnly=False, indent="", formatted=True ): + """Returns the parse results as XML. 
Tags are created for tokens and lists that have defined results names.""" + nl = "\n" + out = [] + namedItems = dict( [ (v[1],k) for (k,vlist) in self.__tokdict.items() + for v in vlist ] ) + nextLevelIndent = indent + " " + + # collapse out indents if formatting is not desired + if not formatted: + indent = "" + nextLevelIndent = "" + nl = "" + + selfTag = None + if doctag is not None: + selfTag = doctag + else: + if self.__name: + selfTag = self.__name + + if not selfTag: + if namedItemsOnly: + return "" + else: + selfTag = "ITEM" + + out += [ nl, indent, "<", selfTag, ">" ] + + worklist = self.__toklist + for i,res in enumerate(worklist): + if isinstance(res,ParseResults): + if i in namedItems: + out += [ res.asXML(namedItems[i], + namedItemsOnly and doctag is None, + nextLevelIndent, + formatted)] + else: + out += [ res.asXML(None, + namedItemsOnly and doctag is None, + nextLevelIndent, + formatted)] + else: + # individual token, see if there is a name for it + resTag = None + if i in namedItems: + resTag = namedItems[i] + if not resTag: + if namedItemsOnly: + continue + else: + resTag = "ITEM" + xmlBodyText = _xml_escape(_ustr(res)) + out += [ nl, nextLevelIndent, "<", resTag, ">", + xmlBodyText, + "" ] + + out += [ nl, indent, "" ] + return "".join(out) + + def __lookup(self,sub): + for k,vlist in self.__tokdict.items(): + for v,loc in vlist: + if sub is v: + return k + return None + + def getName(self): + """Returns the results name for this token expression.""" + if self.__name: + return self.__name + elif self.__parent: + par = self.__parent() + if par: + return par.__lookup(self) + else: + return None + elif (len(self) == 1 and + len(self.__tokdict) == 1 and + self.__tokdict.values()[0][0][1] in (0,-1)): + return self.__tokdict.keys()[0] + else: + return None + + def dump(self,indent='',depth=0): + """Diagnostic method for listing out the contents of a C{ParseResults}. + Accepts an optional C{indent} argument so that this string can be embedded + in a nested display of other data.""" + out = [] + out.append( indent+_ustr(self.asList()) ) + keys = self.items() + keys.sort() + for k,v in keys: + if out: + out.append('\n') + out.append( "%s%s- %s: " % (indent,(' '*depth), k) ) + if isinstance(v,ParseResults): + if v.keys(): + out.append( v.dump(indent,depth+1) ) + else: + out.append(_ustr(v)) + else: + out.append(_ustr(v)) + return "".join(out) + + # add support for pickle protocol + def __getstate__(self): + return ( self.__toklist, + ( self.__tokdict.copy(), + self.__parent is not None and self.__parent() or None, + self.__accumNames, + self.__name ) ) + + def __setstate__(self,state): + self.__toklist = state[0] + self.__tokdict, \ + par, \ + inAccumNames, \ + self.__name = state[1] + self.__accumNames = {} + self.__accumNames.update(inAccumNames) + if par is not None: + self.__parent = wkref(par) + else: + self.__parent = None + + def __dir__(self): + return dir(super(ParseResults,self)) + self.keys() + +def col (loc,strg): + """Returns current column within a string, counting newlines as line separators. + The first column is number 1. + + Note: the default parsing behavior is to expand tabs in the input string + before starting the parsing process. See L{I{ParserElement.parseString}} for more information + on parsing strings containing s, and suggested methods to maintain a + consistent view of the parsed string, the parse location, and line and column + positions within the parsed string. 
+ """ + return (loc} for more information + on parsing strings containing s, and suggested methods to maintain a + consistent view of the parsed string, the parse location, and line and column + positions within the parsed string. + """ + return strg.count("\n",0,loc) + 1 + +def line( loc, strg ): + """Returns the line of text containing loc within a string, counting newlines as line separators. + """ + lastCR = strg.rfind("\n", 0, loc) + nextCR = strg.find("\n", loc) + if nextCR >= 0: + return strg[lastCR+1:nextCR] + else: + return strg[lastCR+1:] + +def _defaultStartDebugAction( instring, loc, expr ): + print ("Match " + _ustr(expr) + " at loc " + _ustr(loc) + "(%d,%d)" % ( lineno(loc,instring), col(loc,instring) )) + +def _defaultSuccessDebugAction( instring, startloc, endloc, expr, toks ): + print ("Matched " + _ustr(expr) + " -> " + str(toks.asList())) + +def _defaultExceptionDebugAction( instring, loc, expr, exc ): + print ("Exception raised:" + _ustr(exc)) + +def nullDebugAction(*args): + """'Do-nothing' debug action, to suppress debugging output during parsing.""" + pass + +'decorator to trim function calls to match the arity of the target' +if not _PY3K: + def _trim_arity(func, maxargs=2): + limit = [0] + def wrapper(*args): + while 1: + try: + return func(*args[limit[0]:]) + except TypeError: + if limit[0] <= maxargs: + limit[0] += 1 + continue + raise + return wrapper +else: + def _trim_arity(func, maxargs=2): + limit = maxargs + def wrapper(*args): + #~ nonlocal limit + while 1: + try: + return func(*args[limit:]) + except TypeError: + if limit: + limit -= 1 + continue + raise + return wrapper + +class ParserElement(object): + """Abstract base level parser element class.""" + DEFAULT_WHITE_CHARS = " \n\t\r" + verbose_stacktrace = False + + def setDefaultWhitespaceChars( chars ): + """Overrides the default whitespace chars + """ + ParserElement.DEFAULT_WHITE_CHARS = chars + setDefaultWhitespaceChars = staticmethod(setDefaultWhitespaceChars) + + def __init__( self, savelist=False ): + self.parseAction = list() + self.failAction = None + #~ self.name = "" # don't define self.name, let subclasses try/except upcall + self.strRepr = None + self.resultsName = None + self.saveAsList = savelist + self.skipWhitespace = True + self.whiteChars = ParserElement.DEFAULT_WHITE_CHARS + self.copyDefaultWhiteChars = True + self.mayReturnEmpty = False # used when checking for left-recursion + self.keepTabs = False + self.ignoreExprs = list() + self.debug = False + self.streamlined = False + self.mayIndexError = True # used to optimize exception handling for subclasses that don't advance parse index + self.errmsg = "" + self.modalResults = True # used to mark results names as modal (report only last) or cumulative (list all) + self.debugActions = ( None, None, None ) #custom debug actions + self.re = None + self.callPreparse = True # used to avoid redundant calls to preParse + self.callDuringTry = False + + def copy( self ): + """Make a copy of this C{ParserElement}. 
Useful for defining different parse actions + for the same parsing pattern, using copies of the original parse element.""" + cpy = copy.copy( self ) + cpy.parseAction = self.parseAction[:] + cpy.ignoreExprs = self.ignoreExprs[:] + if self.copyDefaultWhiteChars: + cpy.whiteChars = ParserElement.DEFAULT_WHITE_CHARS + return cpy + + def setName( self, name ): + """Define name for this expression, for use in debugging.""" + self.name = name + self.errmsg = "Expected " + self.name + if hasattr(self,"exception"): + self.exception.msg = self.errmsg + return self + + def setResultsName( self, name, listAllMatches=False ): + """Define name for referencing matching tokens as a nested attribute + of the returned parse results. + NOTE: this returns a *copy* of the original C{ParserElement} object; + this is so that the client can define a basic element, such as an + integer, and reference it in multiple places with different names. + + You can also set results names using the abbreviated syntax, + C{expr("name")} in place of C{expr.setResultsName("name")} - + see L{I{__call__}<__call__>}. + """ + newself = self.copy() + newself.resultsName = name + newself.modalResults = not listAllMatches + return newself + + def setBreak(self,breakFlag = True): + """Method to invoke the Python pdb debugger when this element is + about to be parsed. Set C{breakFlag} to True to enable, False to + disable. + """ + if breakFlag: + _parseMethod = self._parse + def breaker(instring, loc, doActions=True, callPreParse=True): + import pdb + pdb.set_trace() + return _parseMethod( instring, loc, doActions, callPreParse ) + breaker._originalParseMethod = _parseMethod + self._parse = breaker + else: + if hasattr(self._parse,"_originalParseMethod"): + self._parse = self._parse._originalParseMethod + return self + + def setParseAction( self, *fns, **kwargs ): + """Define action to perform when successfully matching parse element definition. + Parse action fn is a callable method with 0-3 arguments, called as C{fn(s,loc,toks)}, + C{fn(loc,toks)}, C{fn(toks)}, or just C{fn()}, where: + - s = the original string being parsed (see note below) + - loc = the location of the matching substring + - toks = a list of the matched tokens, packaged as a ParseResults object + If the functions in fns modify the tokens, they can return them as the return + value from fn, and the modified list of tokens will replace the original. + Otherwise, fn does not need to return any value. + + Note: the default parsing behavior is to expand tabs in the input string + before starting the parsing process. See L{I{parseString}} for more information + on parsing strings containing s, and suggested methods to maintain a + consistent view of the parsed string, the parse location, and line and column + positions within the parsed string. + """ + self.parseAction = list(map(_trim_arity, list(fns))) + self.callDuringTry = ("callDuringTry" in kwargs and kwargs["callDuringTry"]) + return self + + def addParseAction( self, *fns, **kwargs ): + """Add parse action to expression's list of parse actions. See L{I{setParseAction}}.""" + self.parseAction += list(map(_trim_arity, list(fns))) + self.callDuringTry = self.callDuringTry or ("callDuringTry" in kwargs and kwargs["callDuringTry"]) + return self + + def setFailAction( self, fn ): + """Define action to perform if parsing fails at this expression. 
+ Fail acton fn is a callable function that takes the arguments + C{fn(s,loc,expr,err)} where: + - s = string being parsed + - loc = location where expression match was attempted and failed + - expr = the parse expression that failed + - err = the exception thrown + The function returns no value. It may throw C{ParseFatalException} + if it is desired to stop parsing immediately.""" + self.failAction = fn + return self + + def _skipIgnorables( self, instring, loc ): + exprsFound = True + while exprsFound: + exprsFound = False + for e in self.ignoreExprs: + try: + while 1: + loc,dummy = e._parse( instring, loc ) + exprsFound = True + except ParseException: + pass + return loc + + def preParse( self, instring, loc ): + if self.ignoreExprs: + loc = self._skipIgnorables( instring, loc ) + + if self.skipWhitespace: + wt = self.whiteChars + instrlen = len(instring) + while loc < instrlen and instring[loc] in wt: + loc += 1 + + return loc + + def parseImpl( self, instring, loc, doActions=True ): + return loc, [] + + def postParse( self, instring, loc, tokenlist ): + return tokenlist + + #~ @profile + def _parseNoCache( self, instring, loc, doActions=True, callPreParse=True ): + debugging = ( self.debug ) #and doActions ) + + if debugging or self.failAction: + #~ print ("Match",self,"at loc",loc,"(%d,%d)" % ( lineno(loc,instring), col(loc,instring) )) + if (self.debugActions[0] ): + self.debugActions[0]( instring, loc, self ) + if callPreParse and self.callPreparse: + preloc = self.preParse( instring, loc ) + else: + preloc = loc + tokensStart = preloc + try: + try: + loc,tokens = self.parseImpl( instring, preloc, doActions ) + except IndexError: + raise ParseException( instring, len(instring), self.errmsg, self ) + except ParseBaseException: + #~ print ("Exception raised:", err) + err = None + if self.debugActions[2]: + err = sys.exc_info()[1] + self.debugActions[2]( instring, tokensStart, self, err ) + if self.failAction: + if err is None: + err = sys.exc_info()[1] + self.failAction( instring, tokensStart, self, err ) + raise + else: + if callPreParse and self.callPreparse: + preloc = self.preParse( instring, loc ) + else: + preloc = loc + tokensStart = preloc + if self.mayIndexError or loc >= len(instring): + try: + loc,tokens = self.parseImpl( instring, preloc, doActions ) + except IndexError: + raise ParseException( instring, len(instring), self.errmsg, self ) + else: + loc,tokens = self.parseImpl( instring, preloc, doActions ) + + tokens = self.postParse( instring, loc, tokens ) + + retTokens = ParseResults( tokens, self.resultsName, asList=self.saveAsList, modal=self.modalResults ) + if self.parseAction and (doActions or self.callDuringTry): + if debugging: + try: + for fn in self.parseAction: + tokens = fn( instring, tokensStart, retTokens ) + if tokens is not None: + retTokens = ParseResults( tokens, + self.resultsName, + asList=self.saveAsList and isinstance(tokens,(ParseResults,list)), + modal=self.modalResults ) + except ParseBaseException: + #~ print "Exception raised in user parse action:", err + if (self.debugActions[2] ): + err = sys.exc_info()[1] + self.debugActions[2]( instring, tokensStart, self, err ) + raise + else: + for fn in self.parseAction: + tokens = fn( instring, tokensStart, retTokens ) + if tokens is not None: + retTokens = ParseResults( tokens, + self.resultsName, + asList=self.saveAsList and isinstance(tokens,(ParseResults,list)), + modal=self.modalResults ) + + if debugging: + #~ print ("Matched",self,"->",retTokens.asList()) + if (self.debugActions[1] ): + 
self.debugActions[1]( instring, tokensStart, loc, self, retTokens ) + + return loc, retTokens + + def tryParse( self, instring, loc ): + try: + return self._parse( instring, loc, doActions=False )[0] + except ParseFatalException: + raise ParseException( instring, loc, self.errmsg, self) + + # this method gets repeatedly called during backtracking with the same arguments - + # we can cache these arguments and save ourselves the trouble of re-parsing the contained expression + def _parseCache( self, instring, loc, doActions=True, callPreParse=True ): + lookup = (self,instring,loc,callPreParse,doActions) + if lookup in ParserElement._exprArgCache: + value = ParserElement._exprArgCache[ lookup ] + if isinstance(value, Exception): + raise value + return (value[0],value[1].copy()) + else: + try: + value = self._parseNoCache( instring, loc, doActions, callPreParse ) + ParserElement._exprArgCache[ lookup ] = (value[0],value[1].copy()) + return value + except ParseBaseException: + pe = sys.exc_info()[1] + ParserElement._exprArgCache[ lookup ] = pe + raise + + _parse = _parseNoCache + + # argument cache for optimizing repeated calls when backtracking through recursive expressions + _exprArgCache = {} + def resetCache(): + ParserElement._exprArgCache.clear() + resetCache = staticmethod(resetCache) + + _packratEnabled = False + def enablePackrat(): + """Enables "packrat" parsing, which adds memoizing to the parsing logic. + Repeated parse attempts at the same string location (which happens + often in many complex grammars) can immediately return a cached value, + instead of re-executing parsing/validating code. Memoizing is done of + both valid results and parsing exceptions. + + This speedup may break existing programs that use parse actions that + have side-effects. For this reason, packrat parsing is disabled when + you first import pyparsing. To activate the packrat feature, your + program must call the class method C{ParserElement.enablePackrat()}. If + your program uses C{psyco} to "compile as you go", you must call + C{enablePackrat} before calling C{psyco.full()}. If you do not do this, + Python will crash. For best results, call C{enablePackrat()} immediately + after importing pyparsing. + """ + if not ParserElement._packratEnabled: + ParserElement._packratEnabled = True + ParserElement._parse = ParserElement._parseCache + enablePackrat = staticmethod(enablePackrat) + + def parseString( self, instring, parseAll=False ): + """Execute the parse expression with the given string. + This is the main interface to the client code, once the complete + expression has been built. + + If you want the grammar to require that the entire input string be + successfully parsed, then set C{parseAll} to True (equivalent to ending + the grammar with C{StringEnd()}). + + Note: C{parseString} implicitly calls C{expandtabs()} on the input string, + in order to report proper column numbers in parse actions. 
+ If the input string contains tabs and + the grammar uses parse actions that use the C{loc} argument to index into the + string being parsed, you can ensure you have a consistent view of the input + string by: + - calling C{parseWithTabs} on your grammar before calling C{parseString} + (see L{I{parseWithTabs}}) + - define your parse action using the full C{(s,loc,toks)} signature, and + reference the input string using the parse action's C{s} argument + - explictly expand the tabs in your input string before calling + C{parseString} + """ + ParserElement.resetCache() + if not self.streamlined: + self.streamline() + #~ self.saveAsList = True + for e in self.ignoreExprs: + e.streamline() + if not self.keepTabs: + instring = instring.expandtabs() + try: + loc, tokens = self._parse( instring, 0 ) + if parseAll: + loc = self.preParse( instring, loc ) + se = Empty() + StringEnd() + se._parse( instring, loc ) + except ParseBaseException: + if ParserElement.verbose_stacktrace: + raise + else: + # catch and re-raise exception from here, clears out pyparsing internal stack trace + exc = sys.exc_info()[1] + raise exc + else: + return tokens + + def scanString( self, instring, maxMatches=_MAX_INT, overlap=False ): + """Scan the input string for expression matches. Each match will return the + matching tokens, start location, and end location. May be called with optional + C{maxMatches} argument, to clip scanning after 'n' matches are found. If + C{overlap} is specified, then overlapping matches will be reported. + + Note that the start and end locations are reported relative to the string + being parsed. See L{I{parseString}} for more information on parsing + strings with embedded tabs.""" + if not self.streamlined: + self.streamline() + for e in self.ignoreExprs: + e.streamline() + + if not self.keepTabs: + instring = _ustr(instring).expandtabs() + instrlen = len(instring) + loc = 0 + preparseFn = self.preParse + parseFn = self._parse + ParserElement.resetCache() + matches = 0 + try: + while loc <= instrlen and matches < maxMatches: + try: + preloc = preparseFn( instring, loc ) + nextLoc,tokens = parseFn( instring, preloc, callPreParse=False ) + except ParseException: + loc = preloc+1 + else: + if nextLoc > loc: + matches += 1 + yield tokens, preloc, nextLoc + if overlap: + nextloc = preparseFn( instring, loc ) + if nextloc > loc: + loc = nextLoc + else: + loc += 1 + else: + loc = nextLoc + else: + loc = preloc+1 + except ParseBaseException: + if ParserElement.verbose_stacktrace: + raise + else: + # catch and re-raise exception from here, clears out pyparsing internal stack trace + exc = sys.exc_info()[1] + raise exc + + def transformString( self, instring ): + """Extension to C{scanString}, to modify matching text with modified tokens that may + be returned from a parse action. To use C{transformString}, define a grammar and + attach a parse action to it that modifies the returned token list. + Invoking C{transformString()} on a target string will then scan for matches, + and replace the matched text patterns according to the logic in the parse + action. 
C{transformString()} returns the resulting transformed string.""" + out = [] + lastE = 0 + # force preservation of s, to minimize unwanted transformation of string, and to + # keep string locs straight between transformString and scanString + self.keepTabs = True + try: + for t,s,e in self.scanString( instring ): + out.append( instring[lastE:s] ) + if t: + if isinstance(t,ParseResults): + out += t.asList() + elif isinstance(t,list): + out += t + else: + out.append(t) + lastE = e + out.append(instring[lastE:]) + out = [o for o in out if o] + return "".join(map(_ustr,_flatten(out))) + except ParseBaseException: + if ParserElement.verbose_stacktrace: + raise + else: + # catch and re-raise exception from here, clears out pyparsing internal stack trace + exc = sys.exc_info()[1] + raise exc + + def searchString( self, instring, maxMatches=_MAX_INT ): + """Another extension to C{scanString}, simplifying the access to the tokens found + to match the given parse expression. May be called with optional + C{maxMatches} argument, to clip searching after 'n' matches are found. + """ + try: + return ParseResults([ t for t,s,e in self.scanString( instring, maxMatches ) ]) + except ParseBaseException: + if ParserElement.verbose_stacktrace: + raise + else: + # catch and re-raise exception from here, clears out pyparsing internal stack trace + exc = sys.exc_info()[1] + raise exc + + def __add__(self, other ): + """Implementation of + operator - returns And""" + if isinstance( other, basestring ): + other = Literal( other ) + if not isinstance( other, ParserElement ): + warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), + SyntaxWarning, stacklevel=2) + return None + return And( [ self, other ] ) + + def __radd__(self, other ): + """Implementation of + operator when left operand is not a C{ParserElement}""" + if isinstance( other, basestring ): + other = Literal( other ) + if not isinstance( other, ParserElement ): + warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), + SyntaxWarning, stacklevel=2) + return None + return other + self + + def __sub__(self, other): + """Implementation of - operator, returns C{And} with error stop""" + if isinstance( other, basestring ): + other = Literal( other ) + if not isinstance( other, ParserElement ): + warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), + SyntaxWarning, stacklevel=2) + return None + return And( [ self, And._ErrorStop(), other ] ) + + def __rsub__(self, other ): + """Implementation of - operator when left operand is not a C{ParserElement}""" + if isinstance( other, basestring ): + other = Literal( other ) + if not isinstance( other, ParserElement ): + warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), + SyntaxWarning, stacklevel=2) + return None + return other - self + + def __mul__(self,other): + """Implementation of * operator, allows use of C{expr * 3} in place of + C{expr + expr + expr}. Expressions may also me multiplied by a 2-integer + tuple, similar to C{{min,max}} multipliers in regular expressions. 
Tuples + may also include C{None} as in: + - C{expr*(n,None)} or C{expr*(n,)} is equivalent + to C{expr*n + ZeroOrMore(expr)} + (read as "at least n instances of C{expr}") + - C{expr*(None,n)} is equivalent to C{expr*(0,n)} + (read as "0 to n instances of C{expr}") + - C{expr*(None,None)} is equivalent to C{ZeroOrMore(expr)} + - C{expr*(1,None)} is equivalent to C{OneOrMore(expr)} + + Note that C{expr*(None,n)} does not raise an exception if + more than n exprs exist in the input stream; that is, + C{expr*(None,n)} does not enforce a maximum number of expr + occurrences. If this behavior is desired, then write + C{expr*(None,n) + ~expr} + + """ + if isinstance(other,int): + minElements, optElements = other,0 + elif isinstance(other,tuple): + other = (other + (None, None))[:2] + if other[0] is None: + other = (0, other[1]) + if isinstance(other[0],int) and other[1] is None: + if other[0] == 0: + return ZeroOrMore(self) + if other[0] == 1: + return OneOrMore(self) + else: + return self*other[0] + ZeroOrMore(self) + elif isinstance(other[0],int) and isinstance(other[1],int): + minElements, optElements = other + optElements -= minElements + else: + raise TypeError("cannot multiply 'ParserElement' and ('%s','%s') objects", type(other[0]),type(other[1])) + else: + raise TypeError("cannot multiply 'ParserElement' and '%s' objects", type(other)) + + if minElements < 0: + raise ValueError("cannot multiply ParserElement by negative value") + if optElements < 0: + raise ValueError("second tuple value must be greater or equal to first tuple value") + if minElements == optElements == 0: + raise ValueError("cannot multiply ParserElement by 0 or (0,0)") + + if (optElements): + def makeOptionalList(n): + if n>1: + return Optional(self + makeOptionalList(n-1)) + else: + return Optional(self) + if minElements: + if minElements == 1: + ret = self + makeOptionalList(optElements) + else: + ret = And([self]*minElements) + makeOptionalList(optElements) + else: + ret = makeOptionalList(optElements) + else: + if minElements == 1: + ret = self + else: + ret = And([self]*minElements) + return ret + + def __rmul__(self, other): + return self.__mul__(other) + + def __or__(self, other ): + """Implementation of | operator - returns C{MatchFirst}""" + if isinstance( other, basestring ): + other = Literal( other ) + if not isinstance( other, ParserElement ): + warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), + SyntaxWarning, stacklevel=2) + return None + return MatchFirst( [ self, other ] ) + + def __ror__(self, other ): + """Implementation of | operator when left operand is not a C{ParserElement}""" + if isinstance( other, basestring ): + other = Literal( other ) + if not isinstance( other, ParserElement ): + warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), + SyntaxWarning, stacklevel=2) + return None + return other | self + + def __xor__(self, other ): + """Implementation of ^ operator - returns C{Or}""" + if isinstance( other, basestring ): + other = Literal( other ) + if not isinstance( other, ParserElement ): + warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), + SyntaxWarning, stacklevel=2) + return None + return Or( [ self, other ] ) + + def __rxor__(self, other ): + """Implementation of ^ operator when left operand is not a C{ParserElement}""" + if isinstance( other, basestring ): + other = Literal( other ) + if not isinstance( other, ParserElement ): + warnings.warn("Cannot combine element of type %s with 
ParserElement" % type(other), + SyntaxWarning, stacklevel=2) + return None + return other ^ self + + def __and__(self, other ): + """Implementation of & operator - returns C{Each}""" + if isinstance( other, basestring ): + other = Literal( other ) + if not isinstance( other, ParserElement ): + warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), + SyntaxWarning, stacklevel=2) + return None + return Each( [ self, other ] ) + + def __rand__(self, other ): + """Implementation of & operator when left operand is not a C{ParserElement}""" + if isinstance( other, basestring ): + other = Literal( other ) + if not isinstance( other, ParserElement ): + warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), + SyntaxWarning, stacklevel=2) + return None + return other & self + + def __invert__( self ): + """Implementation of ~ operator - returns C{NotAny}""" + return NotAny( self ) + + def __call__(self, name): + """Shortcut for C{setResultsName}, with C{listAllMatches=default}:: + userdata = Word(alphas).setResultsName("name") + Word(nums+"-").setResultsName("socsecno") + could be written as:: + userdata = Word(alphas)("name") + Word(nums+"-")("socsecno") + + If C{name} is given with a trailing C{'*'} character, then C{listAllMatches} will be + passed as C{True}. + """ + if not name.endswith("*"): + return self.setResultsName(name) + else: + return self.setResultsName(name[:-1], listAllMatches=True) + + def suppress( self ): + """Suppresses the output of this C{ParserElement}; useful to keep punctuation from + cluttering up returned output. + """ + return Suppress( self ) + + def leaveWhitespace( self ): + """Disables the skipping of whitespace before matching the characters in the + C{ParserElement}'s defined pattern. This is normally only used internally by + the pyparsing module, but may be needed in some whitespace-sensitive grammars. + """ + self.skipWhitespace = False + return self + + def setWhitespaceChars( self, chars ): + """Overrides the default whitespace chars + """ + self.skipWhitespace = True + self.whiteChars = chars + self.copyDefaultWhiteChars = False + return self + + def parseWithTabs( self ): + """Overrides default behavior to expand C{}s to spaces before parsing the input string. + Must be called before C{parseString} when the input grammar contains elements that + match C{} characters.""" + self.keepTabs = True + return self + + def ignore( self, other ): + """Define expression to be ignored (e.g., comments) while doing pattern + matching; may be called repeatedly, to define multiple comment or other + ignorable patterns. + """ + if isinstance( other, Suppress ): + if other not in self.ignoreExprs: + self.ignoreExprs.append( other.copy() ) + else: + self.ignoreExprs.append( Suppress( other.copy() ) ) + return self + + def setDebugActions( self, startAction, successAction, exceptionAction ): + """Enable display of debugging messages while doing pattern matching.""" + self.debugActions = (startAction or _defaultStartDebugAction, + successAction or _defaultSuccessDebugAction, + exceptionAction or _defaultExceptionDebugAction) + self.debug = True + return self + + def setDebug( self, flag=True ): + """Enable display of debugging messages while doing pattern matching. 
+ Set C{flag} to True to enable, False to disable.""" + if flag: + self.setDebugActions( _defaultStartDebugAction, _defaultSuccessDebugAction, _defaultExceptionDebugAction ) + else: + self.debug = False + return self + + def __str__( self ): + return self.name + + def __repr__( self ): + return _ustr(self) + + def streamline( self ): + self.streamlined = True + self.strRepr = None + return self + + def checkRecursion( self, parseElementList ): + pass + + def validate( self, validateTrace=[] ): + """Check defined expressions for valid structure, check for infinite recursive definitions.""" + self.checkRecursion( [] ) + + def parseFile( self, file_or_filename, parseAll=False ): + """Execute the parse expression on the given file or filename. + If a filename is specified (instead of a file object), + the entire file is opened, read, and closed before parsing. + """ + try: + file_contents = file_or_filename.read() + except AttributeError: + f = open(file_or_filename, "rb") + file_contents = f.read() + f.close() + try: + return self.parseString(file_contents, parseAll) + except ParseBaseException: + # catch and re-raise exception from here, clears out pyparsing internal stack trace + exc = sys.exc_info()[1] + raise exc + + def getException(self): + return ParseException("",0,self.errmsg,self) + + def __getattr__(self,aname): + if aname == "myException": + self.myException = ret = self.getException(); + return ret; + else: + raise AttributeError("no such attribute " + aname) + + def __eq__(self,other): + if isinstance(other, ParserElement): + return self is other or self.__dict__ == other.__dict__ + elif isinstance(other, basestring): + try: + self.parseString(_ustr(other), parseAll=True) + return True + except ParseBaseException: + return False + else: + return super(ParserElement,self)==other + + def __ne__(self,other): + return not (self == other) + + def __hash__(self): + return hash(id(self)) + + def __req__(self,other): + return self == other + + def __rne__(self,other): + return not (self == other) + + +class Token(ParserElement): + """Abstract C{ParserElement} subclass, for defining atomic matching patterns.""" + def __init__( self ): + super(Token,self).__init__( savelist=False ) + #self.myException = ParseException("",0,"",self) + + def setName(self, name): + s = super(Token,self).setName(name) + self.errmsg = "Expected " + self.name + #s.myException.msg = self.errmsg + return s + + +class Empty(Token): + """An empty token, will always match.""" + def __init__( self ): + super(Empty,self).__init__() + self.name = "Empty" + self.mayReturnEmpty = True + self.mayIndexError = False + + +class NoMatch(Token): + """A token that will never match.""" + def __init__( self ): + super(NoMatch,self).__init__() + self.name = "NoMatch" + self.mayReturnEmpty = True + self.mayIndexError = False + self.errmsg = "Unmatchable token" + #self.myException.msg = self.errmsg + + def parseImpl( self, instring, loc, doActions=True ): + exc = self.myException + exc.loc = loc + exc.pstr = instring + raise exc + + +class Literal(Token): + """Token to exactly match a specified string.""" + def __init__( self, matchString ): + super(Literal,self).__init__() + self.match = matchString + self.matchLen = len(matchString) + try: + self.firstMatchChar = matchString[0] + except IndexError: + warnings.warn("null string passed to Literal; use Empty() instead", + SyntaxWarning, stacklevel=2) + self.__class__ = Empty + self.name = '"%s"' % _ustr(self.match) + self.errmsg = "Expected " + self.name + self.mayReturnEmpty = False 
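Because __eq__ above accepts a plain string and attempts a full parse, an expression can be compared directly against sample text; a minimal sketch, assuming the module imports as pyparsing ("data.txt" is only a placeholder name):

from pyparsing import Word, nums

integer = Word(nums)
print(integer == "12345")    # True: the whole string parses
print(integer == "12a45")    # False: parsing stops at the 'a'
# parseFile accepts a filename or an already-open file object:
#   results = integer.parseFile("data.txt")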
+ #self.myException.msg = self.errmsg + self.mayIndexError = False + + # Performance tuning: this routine gets called a *lot* + # if this is a single character match string and the first character matches, + # short-circuit as quickly as possible, and avoid calling startswith + #~ @profile + def parseImpl( self, instring, loc, doActions=True ): + if (instring[loc] == self.firstMatchChar and + (self.matchLen==1 or instring.startswith(self.match,loc)) ): + return loc+self.matchLen, self.match + #~ raise ParseException( instring, loc, self.errmsg ) + exc = self.myException + exc.loc = loc + exc.pstr = instring + raise exc +_L = Literal + +class Keyword(Token): + """Token to exactly match a specified string as a keyword, that is, it must be + immediately followed by a non-keyword character. Compare with C{Literal}:: + Literal("if") will match the leading C{'if'} in C{'ifAndOnlyIf'}. + Keyword("if") will not; it will only match the leading C{'if'} in C{'if x=1'}, or C{'if(y==2)'} + Accepts two optional constructor arguments in addition to the keyword string: + C{identChars} is a string of characters that would be valid identifier characters, + defaulting to all alphanumerics + "_" and "$"; C{caseless} allows case-insensitive + matching, default is C{False}. + """ + DEFAULT_KEYWORD_CHARS = alphanums+"_$" + + def __init__( self, matchString, identChars=DEFAULT_KEYWORD_CHARS, caseless=False ): + super(Keyword,self).__init__() + self.match = matchString + self.matchLen = len(matchString) + try: + self.firstMatchChar = matchString[0] + except IndexError: + warnings.warn("null string passed to Keyword; use Empty() instead", + SyntaxWarning, stacklevel=2) + self.name = '"%s"' % self.match + self.errmsg = "Expected " + self.name + self.mayReturnEmpty = False + #self.myException.msg = self.errmsg + self.mayIndexError = False + self.caseless = caseless + if caseless: + self.caselessmatch = matchString.upper() + identChars = identChars.upper() + self.identChars = set(identChars) + + def parseImpl( self, instring, loc, doActions=True ): + if self.caseless: + if ( (instring[ loc:loc+self.matchLen ].upper() == self.caselessmatch) and + (loc >= len(instring)-self.matchLen or instring[loc+self.matchLen].upper() not in self.identChars) and + (loc == 0 or instring[loc-1].upper() not in self.identChars) ): + return loc+self.matchLen, self.match + else: + if (instring[loc] == self.firstMatchChar and + (self.matchLen==1 or instring.startswith(self.match,loc)) and + (loc >= len(instring)-self.matchLen or instring[loc+self.matchLen] not in self.identChars) and + (loc == 0 or instring[loc-1] not in self.identChars) ): + return loc+self.matchLen, self.match + #~ raise ParseException( instring, loc, self.errmsg ) + exc = self.myException + exc.loc = loc + exc.pstr = instring + raise exc + + def copy(self): + c = super(Keyword,self).copy() + c.identChars = Keyword.DEFAULT_KEYWORD_CHARS + return c + + def setDefaultKeywordChars( chars ): + """Overrides the default Keyword chars + """ + Keyword.DEFAULT_KEYWORD_CHARS = chars + setDefaultKeywordChars = staticmethod(setDefaultKeywordChars) + +class CaselessLiteral(Literal): + """Token to match a specified string, ignoring case of letters. + Note: the matched results will always be in the case of the given + match string, NOT the case of the input text. + """ + def __init__( self, matchString ): + super(CaselessLiteral,self).__init__( matchString.upper() ) + # Preserve the defining literal. 
+ self.returnString = matchString + self.name = "'%s'" % self.returnString + self.errmsg = "Expected " + self.name + #self.myException.msg = self.errmsg + + def parseImpl( self, instring, loc, doActions=True ): + if instring[ loc:loc+self.matchLen ].upper() == self.match: + return loc+self.matchLen, self.returnString + #~ raise ParseException( instring, loc, self.errmsg ) + exc = self.myException + exc.loc = loc + exc.pstr = instring + raise exc + +class CaselessKeyword(Keyword): + def __init__( self, matchString, identChars=Keyword.DEFAULT_KEYWORD_CHARS ): + super(CaselessKeyword,self).__init__( matchString, identChars, caseless=True ) + + def parseImpl( self, instring, loc, doActions=True ): + if ( (instring[ loc:loc+self.matchLen ].upper() == self.caselessmatch) and + (loc >= len(instring)-self.matchLen or instring[loc+self.matchLen].upper() not in self.identChars) ): + return loc+self.matchLen, self.match + #~ raise ParseException( instring, loc, self.errmsg ) + exc = self.myException + exc.loc = loc + exc.pstr = instring + raise exc + +class Word(Token): + """Token for matching words composed of allowed character sets. + Defined with string containing all allowed initial characters, + an optional string containing allowed body characters (if omitted, + defaults to the initial character set), and an optional minimum, + maximum, and/or exact length. The default value for C{min} is 1 (a + minimum value < 1 is not valid); the default values for C{max} and C{exact} + are 0, meaning no maximum or exact length restriction. + """ + def __init__( self, initChars, bodyChars=None, min=1, max=0, exact=0, asKeyword=False ): + super(Word,self).__init__() + self.initCharsOrig = initChars + self.initChars = set(initChars) + if bodyChars : + self.bodyCharsOrig = bodyChars + self.bodyChars = set(bodyChars) + else: + self.bodyCharsOrig = initChars + self.bodyChars = set(initChars) + + self.maxSpecified = max > 0 + + if min < 1: + raise ValueError("cannot specify a minimum length < 1; use Optional(Word()) if zero-length word is permitted") + + self.minLen = min + + if max > 0: + self.maxLen = max + else: + self.maxLen = _MAX_INT + + if exact > 0: + self.maxLen = exact + self.minLen = exact + + self.name = _ustr(self) + self.errmsg = "Expected " + self.name + #self.myException.msg = self.errmsg + self.mayIndexError = False + self.asKeyword = asKeyword + + if ' ' not in self.initCharsOrig+self.bodyCharsOrig and (min==1 and max==0 and exact==0): + if self.bodyCharsOrig == self.initCharsOrig: + self.reString = "[%s]+" % _escapeRegexRangeChars(self.initCharsOrig) + elif len(self.bodyCharsOrig) == 1: + self.reString = "%s[%s]*" % \ + (re.escape(self.initCharsOrig), + _escapeRegexRangeChars(self.bodyCharsOrig),) + else: + self.reString = "[%s][%s]*" % \ + (_escapeRegexRangeChars(self.initCharsOrig), + _escapeRegexRangeChars(self.bodyCharsOrig),) + if self.asKeyword: + self.reString = r"\b"+self.reString+r"\b" + try: + self.re = re.compile( self.reString ) + except: + self.re = None + + def parseImpl( self, instring, loc, doActions=True ): + if self.re: + result = self.re.match(instring,loc) + if not result: + exc = self.myException + exc.loc = loc + exc.pstr = instring + raise exc + + loc = result.end() + return loc, result.group() + + if not(instring[ loc ] in self.initChars): + #~ raise ParseException( instring, loc, self.errmsg ) + exc = self.myException + exc.loc = loc + exc.pstr = instring + raise exc + start = loc + loc += 1 + instrlen = len(instring) + bodychars = self.bodyChars + maxloc = start + 
self.maxLen + maxloc = min( maxloc, instrlen ) + while loc < maxloc and instring[loc] in bodychars: + loc += 1 + + throwException = False + if loc - start < self.minLen: + throwException = True + if self.maxSpecified and loc < instrlen and instring[loc] in bodychars: + throwException = True + if self.asKeyword: + if (start>0 and instring[start-1] in bodychars) or (loc4: + return s[:4]+"..." + else: + return s + + if ( self.initCharsOrig != self.bodyCharsOrig ): + self.strRepr = "W:(%s,%s)" % ( charsAsStr(self.initCharsOrig), charsAsStr(self.bodyCharsOrig) ) + else: + self.strRepr = "W:(%s)" % charsAsStr(self.initCharsOrig) + + return self.strRepr + + +class Regex(Token): + """Token for matching strings that match a given regular expression. + Defined with string specifying the regular expression in a form recognized by the inbuilt Python re module. + """ + compiledREtype = type(re.compile("[A-Z]")) + def __init__( self, pattern, flags=0): + """The parameters C{pattern} and C{flags} are passed to the C{re.compile()} function as-is. See the Python C{re} module for an explanation of the acceptable patterns and flags.""" + super(Regex,self).__init__() + + if isinstance(pattern, basestring): + if len(pattern) == 0: + warnings.warn("null string passed to Regex; use Empty() instead", + SyntaxWarning, stacklevel=2) + + self.pattern = pattern + self.flags = flags + + try: + self.re = re.compile(self.pattern, self.flags) + self.reString = self.pattern + except sre_constants.error: + warnings.warn("invalid pattern (%s) passed to Regex" % pattern, + SyntaxWarning, stacklevel=2) + raise + + elif isinstance(pattern, Regex.compiledREtype): + self.re = pattern + self.pattern = \ + self.reString = str(pattern) + self.flags = flags + + else: + raise ValueError("Regex may only be constructed with a string or a compiled RE object") + + self.name = _ustr(self) + self.errmsg = "Expected " + self.name + #self.myException.msg = self.errmsg + self.mayIndexError = False + self.mayReturnEmpty = True + + def parseImpl( self, instring, loc, doActions=True ): + result = self.re.match(instring,loc) + if not result: + exc = self.myException + exc.loc = loc + exc.pstr = instring + raise exc + + loc = result.end() + d = result.groupdict() + ret = ParseResults(result.group()) + if d: + for k in d: + ret[k] = d[k] + return loc,ret + + def __str__( self ): + try: + return super(Regex,self).__str__() + except: + pass + + if self.strRepr is None: + self.strRepr = "Re:(%s)" % repr(self.pattern) + + return self.strRepr + + +class QuotedString(Token): + """Token for matching strings that are delimited by quoting characters. 
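A brief sketch contrasting Literal, Keyword, Word, and Regex as defined above, assuming the module imports as pyparsing:

from pyparsing import Keyword, Literal, Regex, Word, alphas, alphanums

identifier = Word(alphas + "_", alphanums + "_")   # initial chars, then body chars
print(identifier.parseString("foo_bar42"))         # ['foo_bar42']

print(Literal("if").searchString("ifAndOnlyIf"))   # matches the leading "if"
print(Keyword("if").searchString("ifAndOnlyIf"))   # no match: "if" is not a whole keyword

date = Regex(r"\d{4}-\d{2}-\d{2}")
print(date.parseString("2014-01-28"))              # ['2014-01-28']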
+ """ + def __init__( self, quoteChar, escChar=None, escQuote=None, multiline=False, unquoteResults=True, endQuoteChar=None): + """ + Defined with the following parameters: + - quoteChar - string of one or more characters defining the quote delimiting string + - escChar - character to escape quotes, typically backslash (default=None) + - escQuote - special quote sequence to escape an embedded quote string (such as SQL's "" to escape an embedded ") (default=None) + - multiline - boolean indicating whether quotes can span multiple lines (default=False) + - unquoteResults - boolean indicating whether the matched text should be unquoted (default=True) + - endQuoteChar - string of one or more characters defining the end of the quote delimited string (default=None => same as quoteChar) + """ + super(QuotedString,self).__init__() + + # remove white space from quote chars - wont work anyway + quoteChar = quoteChar.strip() + if len(quoteChar) == 0: + warnings.warn("quoteChar cannot be the empty string",SyntaxWarning,stacklevel=2) + raise SyntaxError() + + if endQuoteChar is None: + endQuoteChar = quoteChar + else: + endQuoteChar = endQuoteChar.strip() + if len(endQuoteChar) == 0: + warnings.warn("endQuoteChar cannot be the empty string",SyntaxWarning,stacklevel=2) + raise SyntaxError() + + self.quoteChar = quoteChar + self.quoteCharLen = len(quoteChar) + self.firstQuoteChar = quoteChar[0] + self.endQuoteChar = endQuoteChar + self.endQuoteCharLen = len(endQuoteChar) + self.escChar = escChar + self.escQuote = escQuote + self.unquoteResults = unquoteResults + + if multiline: + self.flags = re.MULTILINE | re.DOTALL + self.pattern = r'%s(?:[^%s%s]' % \ + ( re.escape(self.quoteChar), + _escapeRegexRangeChars(self.endQuoteChar[0]), + (escChar is not None and _escapeRegexRangeChars(escChar) or '') ) + else: + self.flags = 0 + self.pattern = r'%s(?:[^%s\n\r%s]' % \ + ( re.escape(self.quoteChar), + _escapeRegexRangeChars(self.endQuoteChar[0]), + (escChar is not None and _escapeRegexRangeChars(escChar) or '') ) + if len(self.endQuoteChar) > 1: + self.pattern += ( + '|(?:' + ')|(?:'.join(["%s[^%s]" % (re.escape(self.endQuoteChar[:i]), + _escapeRegexRangeChars(self.endQuoteChar[i])) + for i in range(len(self.endQuoteChar)-1,0,-1)]) + ')' + ) + if escQuote: + self.pattern += (r'|(?:%s)' % re.escape(escQuote)) + if escChar: + self.pattern += (r'|(?:%s.)' % re.escape(escChar)) + charset = ''.join(set(self.quoteChar[0]+self.endQuoteChar[0])).replace('^',r'\^').replace('-',r'\-') + self.escCharReplacePattern = re.escape(self.escChar)+("([%s])" % charset) + self.pattern += (r')*%s' % re.escape(self.endQuoteChar)) + + try: + self.re = re.compile(self.pattern, self.flags) + self.reString = self.pattern + except sre_constants.error: + warnings.warn("invalid pattern (%s) passed to Regex" % self.pattern, + SyntaxWarning, stacklevel=2) + raise + + self.name = _ustr(self) + self.errmsg = "Expected " + self.name + #self.myException.msg = self.errmsg + self.mayIndexError = False + self.mayReturnEmpty = True + + def parseImpl( self, instring, loc, doActions=True ): + result = instring[loc] == self.firstQuoteChar and self.re.match(instring,loc) or None + if not result: + exc = self.myException + exc.loc = loc + exc.pstr = instring + raise exc + + loc = result.end() + ret = result.group() + + if self.unquoteResults: + + # strip off quotes + ret = ret[self.quoteCharLen:-self.endQuoteCharLen] + + if isinstance(ret,basestring): + # replace escaped characters + if self.escChar: + ret = re.sub(self.escCharReplacePattern,"\g<1>",ret) 
+ + # replace escaped quotes + if self.escQuote: + ret = ret.replace(self.escQuote, self.endQuoteChar) + + return loc, ret + + def __str__( self ): + try: + return super(QuotedString,self).__str__() + except: + pass + + if self.strRepr is None: + self.strRepr = "quoted string, starting with %s ending with %s" % (self.quoteChar, self.endQuoteChar) + + return self.strRepr + + +class CharsNotIn(Token): + """Token for matching words composed of characters *not* in a given set. + Defined with string containing all disallowed characters, and an optional + minimum, maximum, and/or exact length. The default value for C{min} is 1 (a + minimum value < 1 is not valid); the default values for C{max} and C{exact} + are 0, meaning no maximum or exact length restriction. + """ + def __init__( self, notChars, min=1, max=0, exact=0 ): + super(CharsNotIn,self).__init__() + self.skipWhitespace = False + self.notChars = notChars + + if min < 1: + raise ValueError("cannot specify a minimum length < 1; use Optional(CharsNotIn()) if zero-length char group is permitted") + + self.minLen = min + + if max > 0: + self.maxLen = max + else: + self.maxLen = _MAX_INT + + if exact > 0: + self.maxLen = exact + self.minLen = exact + + self.name = _ustr(self) + self.errmsg = "Expected " + self.name + self.mayReturnEmpty = ( self.minLen == 0 ) + #self.myException.msg = self.errmsg + self.mayIndexError = False + + def parseImpl( self, instring, loc, doActions=True ): + if instring[loc] in self.notChars: + #~ raise ParseException( instring, loc, self.errmsg ) + exc = self.myException + exc.loc = loc + exc.pstr = instring + raise exc + + start = loc + loc += 1 + notchars = self.notChars + maxlen = min( start+self.maxLen, len(instring) ) + while loc < maxlen and \ + (instring[loc] not in notchars): + loc += 1 + + if loc - start < self.minLen: + #~ raise ParseException( instring, loc, self.errmsg ) + exc = self.myException + exc.loc = loc + exc.pstr = instring + raise exc + + return loc, instring[start:loc] + + def __str__( self ): + try: + return super(CharsNotIn, self).__str__() + except: + pass + + if self.strRepr is None: + if len(self.notChars) > 4: + self.strRepr = "!W:(%s...)" % self.notChars[:4] + else: + self.strRepr = "!W:(%s)" % self.notChars + + return self.strRepr + +class White(Token): + """Special matching class for matching whitespace. Normally, whitespace is ignored + by pyparsing grammars. This class is included when some whitespace structures + are significant. Define with a string containing the whitespace characters to be + matched; default is C{" \\t\\r\\n"}. 
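A small sketch of QuotedString and CharsNotIn in use, assuming the module imports as pyparsing:

from pyparsing import CharsNotIn, QuotedString

sql_string = QuotedString("'", escQuote="''")      # SQL-style doubled quote as the escape
print(sql_string.parseString("'it''s here'"))      # ["it's here"]

csv_field = CharsNotIn(",\n")                      # run of characters up to a comma or newline
print(csv_field.parseString("plain text,next"))    # ['plain text']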
Also takes optional C{min}, C{max}, and C{exact} arguments, + as defined for the C{Word} class.""" + whiteStrs = { + " " : "", + "\t": "", + "\n": "", + "\r": "", + "\f": "", + } + def __init__(self, ws=" \t\r\n", min=1, max=0, exact=0): + super(White,self).__init__() + self.matchWhite = ws + self.setWhitespaceChars( "".join([c for c in self.whiteChars if c not in self.matchWhite]) ) + #~ self.leaveWhitespace() + self.name = ("".join([White.whiteStrs[c] for c in self.matchWhite])) + self.mayReturnEmpty = True + self.errmsg = "Expected " + self.name + #self.myException.msg = self.errmsg + + self.minLen = min + + if max > 0: + self.maxLen = max + else: + self.maxLen = _MAX_INT + + if exact > 0: + self.maxLen = exact + self.minLen = exact + + def parseImpl( self, instring, loc, doActions=True ): + if not(instring[ loc ] in self.matchWhite): + #~ raise ParseException( instring, loc, self.errmsg ) + exc = self.myException + exc.loc = loc + exc.pstr = instring + raise exc + start = loc + loc += 1 + maxloc = start + self.maxLen + maxloc = min( maxloc, len(instring) ) + while loc < maxloc and instring[loc] in self.matchWhite: + loc += 1 + + if loc - start < self.minLen: + #~ raise ParseException( instring, loc, self.errmsg ) + exc = self.myException + exc.loc = loc + exc.pstr = instring + raise exc + + return loc, instring[start:loc] + + +class _PositionToken(Token): + def __init__( self ): + super(_PositionToken,self).__init__() + self.name=self.__class__.__name__ + self.mayReturnEmpty = True + self.mayIndexError = False + +class GoToColumn(_PositionToken): + """Token to advance to a specific column of input text; useful for tabular report scraping.""" + def __init__( self, colno ): + super(GoToColumn,self).__init__() + self.col = colno + + def preParse( self, instring, loc ): + if col(loc,instring) != self.col: + instrlen = len(instring) + if self.ignoreExprs: + loc = self._skipIgnorables( instring, loc ) + while loc < instrlen and instring[loc].isspace() and col( loc, instring ) != self.col : + loc += 1 + return loc + + def parseImpl( self, instring, loc, doActions=True ): + thiscol = col( loc, instring ) + if thiscol > self.col: + raise ParseException( instring, loc, "Text not in expected column", self ) + newloc = loc + self.col - thiscol + ret = instring[ loc: newloc ] + return newloc, ret + +class LineStart(_PositionToken): + """Matches if current position is at the beginning of a line within the parse string""" + def __init__( self ): + super(LineStart,self).__init__() + self.setWhitespaceChars( ParserElement.DEFAULT_WHITE_CHARS.replace("\n","") ) + self.errmsg = "Expected start of line" + #self.myException.msg = self.errmsg + + def preParse( self, instring, loc ): + preloc = super(LineStart,self).preParse(instring,loc) + if instring[preloc] == "\n": + loc += 1 + return loc + + def parseImpl( self, instring, loc, doActions=True ): + if not( loc==0 or + (loc == self.preParse( instring, 0 )) or + (instring[loc-1] == "\n") ): #col(loc, instring) != 1: + #~ raise ParseException( instring, loc, "Expected start of line" ) + exc = self.myException + exc.loc = loc + exc.pstr = instring + raise exc + return loc, [] + +class LineEnd(_PositionToken): + """Matches if current position is at the end of a line within the parse string""" + def __init__( self ): + super(LineEnd,self).__init__() + self.setWhitespaceChars( ParserElement.DEFAULT_WHITE_CHARS.replace("\n","") ) + self.errmsg = "Expected end of line" + #self.myException.msg = self.errmsg + + def parseImpl( self, instring, loc, doActions=True ): 
+ if loc len(instring): + return loc, [] + else: + exc = self.myException + exc.loc = loc + exc.pstr = instring + raise exc + +class WordStart(_PositionToken): + """Matches if the current position is at the beginning of a Word, and + is not preceded by any character in a given set of C{wordChars} + (default=C{printables}). To emulate the C{\b} behavior of regular expressions, + use C{WordStart(alphanums)}. C{WordStart} will also match at the beginning of + the string being parsed, or at the beginning of a line. + """ + def __init__(self, wordChars = printables): + super(WordStart,self).__init__() + self.wordChars = set(wordChars) + self.errmsg = "Not at the start of a word" + + def parseImpl(self, instring, loc, doActions=True ): + if loc != 0: + if (instring[loc-1] in self.wordChars or + instring[loc] not in self.wordChars): + exc = self.myException + exc.loc = loc + exc.pstr = instring + raise exc + return loc, [] + +class WordEnd(_PositionToken): + """Matches if the current position is at the end of a Word, and + is not followed by any character in a given set of C{wordChars} + (default=C{printables}). To emulate the C{\b} behavior of regular expressions, + use C{WordEnd(alphanums)}. C{WordEnd} will also match at the end of + the string being parsed, or at the end of a line. + """ + def __init__(self, wordChars = printables): + super(WordEnd,self).__init__() + self.wordChars = set(wordChars) + self.skipWhitespace = False + self.errmsg = "Not at the end of a word" + + def parseImpl(self, instring, loc, doActions=True ): + instrlen = len(instring) + if instrlen>0 and loc maxExcLoc: + maxException = err + maxExcLoc = err.loc + except IndexError: + if len(instring) > maxExcLoc: + maxException = ParseException(instring,len(instring),e.errmsg,self) + maxExcLoc = len(instring) + else: + if loc2 > maxMatchLoc: + maxMatchLoc = loc2 + maxMatchExp = e + + if maxMatchLoc < 0: + if maxException is not None: + raise maxException + else: + raise ParseException(instring, loc, "no defined alternatives to match", self) + + return maxMatchExp._parse( instring, loc, doActions ) + + def __ixor__(self, other ): + if isinstance( other, basestring ): + other = Literal( other ) + return self.append( other ) #Or( [ self, other ] ) + + def __str__( self ): + if hasattr(self,"name"): + return self.name + + if self.strRepr is None: + self.strRepr = "{" + " ^ ".join( [ _ustr(e) for e in self.exprs ] ) + "}" + + return self.strRepr + + def checkRecursion( self, parseElementList ): + subRecCheckList = parseElementList[:] + [ self ] + for e in self.exprs: + e.checkRecursion( subRecCheckList ) + + +class MatchFirst(ParseExpression): + """Requires that at least one C{ParseExpression} is found. + If two expressions match, the first one listed is the one that will match. + May be constructed using the C{'|'} operator. 
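The positional tokens above can anchor a match to word boundaries; a minimal sketch, assuming the module imports as pyparsing:

from pyparsing import Word, WordEnd, WordStart, alphanums, nums

# match a number only when it stands alone, so the digits inside "QT3264" are skipped
count = WordStart(alphanums) + Word(nums) + WordEnd(alphanums)
print(count.searchString("QT3264 spacer 32 mm"))   # [['32']]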
+ """ + def __init__( self, exprs, savelist = False ): + super(MatchFirst,self).__init__(exprs, savelist) + if exprs: + self.mayReturnEmpty = False + for e in self.exprs: + if e.mayReturnEmpty: + self.mayReturnEmpty = True + break + else: + self.mayReturnEmpty = True + + def parseImpl( self, instring, loc, doActions=True ): + maxExcLoc = -1 + maxException = None + for e in self.exprs: + try: + ret = e._parse( instring, loc, doActions ) + return ret + except ParseException, err: + if err.loc > maxExcLoc: + maxException = err + maxExcLoc = err.loc + except IndexError: + if len(instring) > maxExcLoc: + maxException = ParseException(instring,len(instring),e.errmsg,self) + maxExcLoc = len(instring) + + # only got here if no expression matched, raise exception for match that made it the furthest + else: + if maxException is not None: + raise maxException + else: + raise ParseException(instring, loc, "no defined alternatives to match", self) + + def __ior__(self, other ): + if isinstance( other, basestring ): + other = Literal( other ) + return self.append( other ) #MatchFirst( [ self, other ] ) + + def __str__( self ): + if hasattr(self,"name"): + return self.name + + if self.strRepr is None: + self.strRepr = "{" + " | ".join( [ _ustr(e) for e in self.exprs ] ) + "}" + + return self.strRepr + + def checkRecursion( self, parseElementList ): + subRecCheckList = parseElementList[:] + [ self ] + for e in self.exprs: + e.checkRecursion( subRecCheckList ) + + +class Each(ParseExpression): + """Requires all given C{ParseExpression}s to be found, but in any order. + Expressions may be separated by whitespace. + May be constructed using the C{'&'} operator. + """ + def __init__( self, exprs, savelist = True ): + super(Each,self).__init__(exprs, savelist) + self.mayReturnEmpty = True + for e in self.exprs: + if not e.mayReturnEmpty: + self.mayReturnEmpty = False + break + self.skipWhitespace = True + self.initExprGroups = True + + def parseImpl( self, instring, loc, doActions=True ): + if self.initExprGroups: + opt1 = [ e.expr for e in self.exprs if isinstance(e,Optional) ] + opt2 = [ e for e in self.exprs if e.mayReturnEmpty and e not in opt1 ] + self.optionals = opt1 + opt2 + self.multioptionals = [ e.expr for e in self.exprs if isinstance(e,ZeroOrMore) ] + self.multirequired = [ e.expr for e in self.exprs if isinstance(e,OneOrMore) ] + self.required = [ e for e in self.exprs if not isinstance(e,(Optional,ZeroOrMore,OneOrMore)) ] + self.required += self.multirequired + self.initExprGroups = False + tmpLoc = loc + tmpReqd = self.required[:] + tmpOpt = self.optionals[:] + matchOrder = [] + + keepMatching = True + while keepMatching: + tmpExprs = tmpReqd + tmpOpt + self.multioptionals + self.multirequired + failed = [] + for e in tmpExprs: + try: + tmpLoc = e.tryParse( instring, tmpLoc ) + except ParseException: + failed.append(e) + else: + matchOrder.append(e) + if e in tmpReqd: + tmpReqd.remove(e) + elif e in tmpOpt: + tmpOpt.remove(e) + if len(failed) == len(tmpExprs): + keepMatching = False + + if tmpReqd: + missing = ", ".join( [ _ustr(e) for e in tmpReqd ] ) + raise ParseException(instring,loc,"Missing one or more required elements (%s)" % missing ) + + # add any unmatched Optionals, in case they have default values defined + matchOrder += [e for e in self.exprs if isinstance(e,Optional) and e.expr in tmpOpt] + + resultlist = [] + for e in matchOrder: + loc,results = e._parse(instring,loc,doActions) + resultlist.append(results) + + finalResults = ParseResults([]) + for r in resultlist: + dups = {} + 
for k in r.keys(): + if k in finalResults.keys(): + tmp = ParseResults(finalResults[k]) + tmp += ParseResults(r[k]) + dups[k] = tmp + finalResults += ParseResults(r) + for k,v in dups.items(): + finalResults[k] = v + return loc, finalResults + + def __str__( self ): + if hasattr(self,"name"): + return self.name + + if self.strRepr is None: + self.strRepr = "{" + " & ".join( [ _ustr(e) for e in self.exprs ] ) + "}" + + return self.strRepr + + def checkRecursion( self, parseElementList ): + subRecCheckList = parseElementList[:] + [ self ] + for e in self.exprs: + e.checkRecursion( subRecCheckList ) + + +class ParseElementEnhance(ParserElement): + """Abstract subclass of C{ParserElement}, for combining and post-processing parsed tokens.""" + def __init__( self, expr, savelist=False ): + super(ParseElementEnhance,self).__init__(savelist) + if isinstance( expr, basestring ): + expr = Literal(expr) + self.expr = expr + self.strRepr = None + if expr is not None: + self.mayIndexError = expr.mayIndexError + self.mayReturnEmpty = expr.mayReturnEmpty + self.setWhitespaceChars( expr.whiteChars ) + self.skipWhitespace = expr.skipWhitespace + self.saveAsList = expr.saveAsList + self.callPreparse = expr.callPreparse + self.ignoreExprs.extend(expr.ignoreExprs) + + def parseImpl( self, instring, loc, doActions=True ): + if self.expr is not None: + return self.expr._parse( instring, loc, doActions, callPreParse=False ) + else: + raise ParseException("",loc,self.errmsg,self) + + def leaveWhitespace( self ): + self.skipWhitespace = False + self.expr = self.expr.copy() + if self.expr is not None: + self.expr.leaveWhitespace() + return self + + def ignore( self, other ): + if isinstance( other, Suppress ): + if other not in self.ignoreExprs: + super( ParseElementEnhance, self).ignore( other ) + if self.expr is not None: + self.expr.ignore( self.ignoreExprs[-1] ) + else: + super( ParseElementEnhance, self).ignore( other ) + if self.expr is not None: + self.expr.ignore( self.ignoreExprs[-1] ) + return self + + def streamline( self ): + super(ParseElementEnhance,self).streamline() + if self.expr is not None: + self.expr.streamline() + return self + + def checkRecursion( self, parseElementList ): + if self in parseElementList: + raise RecursiveGrammarException( parseElementList+[self] ) + subRecCheckList = parseElementList[:] + [ self ] + if self.expr is not None: + self.expr.checkRecursion( subRecCheckList ) + + def validate( self, validateTrace=[] ): + tmp = validateTrace[:]+[self] + if self.expr is not None: + self.expr.validate(tmp) + self.checkRecursion( [] ) + + def __str__( self ): + try: + return super(ParseElementEnhance,self).__str__() + except: + pass + + if self.strRepr is None and self.expr is not None: + self.strRepr = "%s:(%s)" % ( self.__class__.__name__, _ustr(self.expr) ) + return self.strRepr + + +class FollowedBy(ParseElementEnhance): + """Lookahead matching of the given parse expression. C{FollowedBy} + does *not* advance the parsing position within the input string, it only + verifies that the specified parse expression matches at the current + position. C{FollowedBy} always returns a null token list.""" + def __init__( self, expr ): + super(FollowedBy,self).__init__(expr) + self.mayReturnEmpty = True + + def parseImpl( self, instring, loc, doActions=True ): + self.expr.tryParse( instring, loc ) + return loc, [] + + +class NotAny(ParseElementEnhance): + """Lookahead to disallow matching with the given parse expression. 
C{NotAny} + does *not* advance the parsing position within the input string, it only + verifies that the specified parse expression does *not* match at the current + position. Also, C{NotAny} does *not* skip over leading whitespace. C{NotAny} + always returns a null token list. May be constructed using the '~' operator.""" + def __init__( self, expr ): + super(NotAny,self).__init__(expr) + #~ self.leaveWhitespace() + self.skipWhitespace = False # do NOT use self.leaveWhitespace(), don't want to propagate to exprs + self.mayReturnEmpty = True + self.errmsg = "Found unwanted token, "+_ustr(self.expr) + #self.myException = ParseException("",0,self.errmsg,self) + + def parseImpl( self, instring, loc, doActions=True ): + try: + self.expr.tryParse( instring, loc ) + except (ParseException,IndexError): + pass + else: + #~ raise ParseException(instring, loc, self.errmsg ) + exc = self.myException + exc.loc = loc + exc.pstr = instring + raise exc + return loc, [] + + def __str__( self ): + if hasattr(self,"name"): + return self.name + + if self.strRepr is None: + self.strRepr = "~{" + _ustr(self.expr) + "}" + + return self.strRepr + + +class ZeroOrMore(ParseElementEnhance): + """Optional repetition of zero or more of the given expression.""" + def __init__( self, expr ): + super(ZeroOrMore,self).__init__(expr) + self.mayReturnEmpty = True + + def parseImpl( self, instring, loc, doActions=True ): + tokens = [] + try: + loc, tokens = self.expr._parse( instring, loc, doActions, callPreParse=False ) + hasIgnoreExprs = ( len(self.ignoreExprs) > 0 ) + while 1: + if hasIgnoreExprs: + preloc = self._skipIgnorables( instring, loc ) + else: + preloc = loc + loc, tmptokens = self.expr._parse( instring, preloc, doActions ) + if tmptokens or tmptokens.keys(): + tokens += tmptokens + except (ParseException,IndexError): + pass + + return loc, tokens + + def __str__( self ): + if hasattr(self,"name"): + return self.name + + if self.strRepr is None: + self.strRepr = "[" + _ustr(self.expr) + "]..." + + return self.strRepr + + def setResultsName( self, name, listAllMatches=False ): + ret = super(ZeroOrMore,self).setResultsName(name,listAllMatches) + ret.saveAsList = True + return ret + + +class OneOrMore(ParseElementEnhance): + """Repetition of one or more of the given expression.""" + def parseImpl( self, instring, loc, doActions=True ): + # must be at least one + loc, tokens = self.expr._parse( instring, loc, doActions, callPreParse=False ) + try: + hasIgnoreExprs = ( len(self.ignoreExprs) > 0 ) + while 1: + if hasIgnoreExprs: + preloc = self._skipIgnorables( instring, loc ) + else: + preloc = loc + loc, tmptokens = self.expr._parse( instring, preloc, doActions ) + if tmptokens or tmptokens.keys(): + tokens += tmptokens + except (ParseException,IndexError): + pass + + return loc, tokens + + def __str__( self ): + if hasattr(self,"name"): + return self.name + + if self.strRepr is None: + self.strRepr = "{" + _ustr(self.expr) + "}..." + + return self.strRepr + + def setResultsName( self, name, listAllMatches=False ): + ret = super(OneOrMore,self).setResultsName(name,listAllMatches) + ret.saveAsList = True + return ret + +class _NullToken(object): + def __bool__(self): + return False + __nonzero__ = __bool__ + def __str__(self): + return "" + +_optionalNotMatched = _NullToken() +class Optional(ParseElementEnhance): + """Optional matching of the given expression. + A default return string can also be specified, if the optional expression + is not found. 
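A short sketch of ZeroOrMore and of Optional with a default value, assuming the module imports as pyparsing:

from pyparsing import Optional, Suppress, Word, ZeroOrMore, nums

print(ZeroOrMore(Word(nums)).parseString("1 2 3"))   # ['1', '2', '3']

phone = Word(nums, exact=3) + Suppress("-") + Word(nums, exact=4) + \
        Optional(Suppress("x") + Word(nums), default="")
print(phone.parseString("555-1234 x42"))             # ['555', '1234', '42']
print(phone.parseString("555-1234"))                 # ['555', '1234', '']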
+ """ + def __init__( self, exprs, default=_optionalNotMatched ): + super(Optional,self).__init__( exprs, savelist=False ) + self.defaultValue = default + self.mayReturnEmpty = True + + def parseImpl( self, instring, loc, doActions=True ): + try: + loc, tokens = self.expr._parse( instring, loc, doActions, callPreParse=False ) + except (ParseException,IndexError): + if self.defaultValue is not _optionalNotMatched: + if self.expr.resultsName: + tokens = ParseResults([ self.defaultValue ]) + tokens[self.expr.resultsName] = self.defaultValue + else: + tokens = [ self.defaultValue ] + else: + tokens = [] + return loc, tokens + + def __str__( self ): + if hasattr(self,"name"): + return self.name + + if self.strRepr is None: + self.strRepr = "[" + _ustr(self.expr) + "]" + + return self.strRepr + + +class SkipTo(ParseElementEnhance): + """Token for skipping over all undefined text until the matched expression is found. + If C{include} is set to true, the matched expression is also parsed (the skipped text + and matched expression are returned as a 2-element list). The C{ignore} + argument is used to define grammars (typically quoted strings and comments) that + might contain false matches. + """ + def __init__( self, other, include=False, ignore=None, failOn=None ): + super( SkipTo, self ).__init__( other ) + self.ignoreExpr = ignore + self.mayReturnEmpty = True + self.mayIndexError = False + self.includeMatch = include + self.asList = False + if failOn is not None and isinstance(failOn, basestring): + self.failOn = Literal(failOn) + else: + self.failOn = failOn + self.errmsg = "No match found for "+_ustr(self.expr) + #self.myException = ParseException("",0,self.errmsg,self) + + def parseImpl( self, instring, loc, doActions=True ): + startLoc = loc + instrlen = len(instring) + expr = self.expr + failParse = False + while loc <= instrlen: + try: + if self.failOn: + try: + self.failOn.tryParse(instring, loc) + except ParseBaseException: + pass + else: + failParse = True + raise ParseException(instring, loc, "Found expression " + str(self.failOn)) + failParse = False + if self.ignoreExpr is not None: + while 1: + try: + loc = self.ignoreExpr.tryParse(instring,loc) + # print "found ignoreExpr, advance to", loc + except ParseBaseException: + break + expr._parse( instring, loc, doActions=False, callPreParse=False ) + skipText = instring[startLoc:loc] + if self.includeMatch: + loc,mat = expr._parse(instring,loc,doActions,callPreParse=False) + if mat: + skipRes = ParseResults( skipText ) + skipRes += mat + return loc, [ skipRes ] + else: + return loc, [ skipText ] + else: + return loc, [ skipText ] + except (ParseException,IndexError): + if failParse: + raise + else: + loc += 1 + exc = self.myException + exc.loc = loc + exc.pstr = instring + raise exc + +class Forward(ParseElementEnhance): + """Forward declaration of an expression to be defined later - + used for recursive grammars, such as algebraic infix notation. + When the expression is known, it is assigned to the C{Forward} variable using the '<<' operator. + + Note: take care when assigning to C{Forward} not to overlook precedence of operators. + Specifically, '|' has a lower precedence than '<<', so that:: + fwdExpr << a | b | c + will actually be evaluated as:: + (fwdExpr << a) | b | c + thereby leaving b and c out as parseable alternatives. 
It is recommended that you + explicitly group the values inserted into the C{Forward}:: + fwdExpr << (a | b | c) + """ + def __init__( self, other=None ): + super(Forward,self).__init__( other, savelist=False ) + + def __lshift__( self, other ): + if isinstance( other, basestring ): + other = Literal(other) + self.expr = other + self.mayReturnEmpty = other.mayReturnEmpty + self.strRepr = None + self.mayIndexError = self.expr.mayIndexError + self.mayReturnEmpty = self.expr.mayReturnEmpty + self.setWhitespaceChars( self.expr.whiteChars ) + self.skipWhitespace = self.expr.skipWhitespace + self.saveAsList = self.expr.saveAsList + self.ignoreExprs.extend(self.expr.ignoreExprs) + return None + + def leaveWhitespace( self ): + self.skipWhitespace = False + return self + + def streamline( self ): + if not self.streamlined: + self.streamlined = True + if self.expr is not None: + self.expr.streamline() + return self + + def validate( self, validateTrace=[] ): + if self not in validateTrace: + tmp = validateTrace[:]+[self] + if self.expr is not None: + self.expr.validate(tmp) + self.checkRecursion([]) + + def __str__( self ): + if hasattr(self,"name"): + return self.name + + self._revertClass = self.__class__ + self.__class__ = _ForwardNoRecurse + try: + if self.expr is not None: + retString = _ustr(self.expr) + else: + retString = "None" + finally: + self.__class__ = self._revertClass + return self.__class__.__name__ + ": " + retString + + def copy(self): + if self.expr is not None: + return super(Forward,self).copy() + else: + ret = Forward() + ret << self + return ret + +class _ForwardNoRecurse(Forward): + def __str__( self ): + return "..." + +class TokenConverter(ParseElementEnhance): + """Abstract subclass of C{ParseExpression}, for converting parsed results.""" + def __init__( self, expr, savelist=False ): + super(TokenConverter,self).__init__( expr )#, savelist ) + self.saveAsList = False + +class Upcase(TokenConverter): + """Converter to upper case all matching tokens.""" + def __init__(self, *args): + super(Upcase,self).__init__(*args) + warnings.warn("Upcase class is deprecated, use upcaseTokens parse action instead", + DeprecationWarning,stacklevel=2) + + def postParse( self, instring, loc, tokenlist ): + return list(map( string.upper, tokenlist )) + + +class Combine(TokenConverter): + """Converter to concatenate all matching tokens to a single string. + By default, the matching patterns must also be contiguous in the input string; + this can be disabled by specifying C{'adjacent=False'} in the constructor. 
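A small sketch of Combine gluing adjacent tokens back into one string, assuming the module imports as pyparsing:

from pyparsing import Combine, Optional, Word, nums

real = Combine(Word(nums) + "." + Word(nums))        # tokens must be adjacent in the input
print(real.parseString("3.14159"))                   # ['3.14159']
print(Combine(Word(nums) + Optional("." + Word(nums))).parseString("42"))   # ['42']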
+ """ + def __init__( self, expr, joinString="", adjacent=True ): + super(Combine,self).__init__( expr ) + # suppress whitespace-stripping in contained parse expressions, but re-enable it on the Combine itself + if adjacent: + self.leaveWhitespace() + self.adjacent = adjacent + self.skipWhitespace = True + self.joinString = joinString + self.callPreparse = True + + def ignore( self, other ): + if self.adjacent: + ParserElement.ignore(self, other) + else: + super( Combine, self).ignore( other ) + return self + + def postParse( self, instring, loc, tokenlist ): + retToks = tokenlist.copy() + del retToks[:] + retToks += ParseResults([ "".join(tokenlist._asStringList(self.joinString)) ], modal=self.modalResults) + + if self.resultsName and len(retToks.keys())>0: + return [ retToks ] + else: + return retToks + +class Group(TokenConverter): + """Converter to return the matched tokens as a list - useful for returning tokens of C{ZeroOrMore} and C{OneOrMore} expressions.""" + def __init__( self, expr ): + super(Group,self).__init__( expr ) + self.saveAsList = True + + def postParse( self, instring, loc, tokenlist ): + return [ tokenlist ] + +class Dict(TokenConverter): + """Converter to return a repetitive expression as a list, but also as a dictionary. + Each element can also be referenced using the first token in the expression as its key. + Useful for tabular report scraping when the first column can be used as a item key. + """ + def __init__( self, exprs ): + super(Dict,self).__init__( exprs ) + self.saveAsList = True + + def postParse( self, instring, loc, tokenlist ): + for i,tok in enumerate(tokenlist): + if len(tok) == 0: + continue + ikey = tok[0] + if isinstance(ikey,int): + ikey = _ustr(tok[0]).strip() + if len(tok)==1: + tokenlist[ikey] = _ParseResultsWithOffset("",i) + elif len(tok)==2 and not isinstance(tok[1],ParseResults): + tokenlist[ikey] = _ParseResultsWithOffset(tok[1],i) + else: + dictvalue = tok.copy() #ParseResults(i) + del dictvalue[0] + if len(dictvalue)!= 1 or (isinstance(dictvalue,ParseResults) and dictvalue.keys()): + tokenlist[ikey] = _ParseResultsWithOffset(dictvalue,i) + else: + tokenlist[ikey] = _ParseResultsWithOffset(dictvalue[0],i) + + if self.resultsName: + return [ tokenlist ] + else: + return tokenlist + + +class Suppress(TokenConverter): + """Converter for ignoring the results of a parsed expression.""" + def postParse( self, instring, loc, tokenlist ): + return [] + + def suppress( self ): + return self + + +class OnlyOnce(object): + """Wrapper for parse actions, to ensure they are only called once.""" + def __init__(self, methodCall): + self.callable = _trim_arity(methodCall) + self.called = False + def __call__(self,s,l,t): + if not self.called: + results = self.callable(s,l,t) + self.called = True + return results + raise ParseException(s,l,"") + def reset(self): + self.called = False + +def traceParseAction(f): + """Decorator for debugging parse actions.""" + f = _trim_arity(f) + def z(*paArgs): + thisFunc = f.func_name + s,l,t = paArgs[-3:] + if len(paArgs)>3: + thisFunc = paArgs[0].__class__.__name__ + '.' 
+ thisFunc + sys.stderr.write( ">>entering %s(line: '%s', %d, %s)\n" % (thisFunc,line(l,s),l,t) ) + try: + ret = f(*paArgs) + except Exception: + exc = sys.exc_info()[1] + sys.stderr.write( "<", "|".join( [ _escapeRegexChars(sym) for sym in symbols] )) + try: + if len(symbols)==len("".join(symbols)): + return Regex( "[%s]" % "".join( [ _escapeRegexRangeChars(sym) for sym in symbols] ) ) + else: + return Regex( "|".join( [ re.escape(sym) for sym in symbols] ) ) + except: + warnings.warn("Exception creating Regex for oneOf, building MatchFirst", + SyntaxWarning, stacklevel=2) + + + # last resort, just use MatchFirst + return MatchFirst( [ parseElementClass(sym) for sym in symbols ] ) + +def dictOf( key, value ): + """Helper to easily and clearly define a dictionary by specifying the respective patterns + for the key and value. Takes care of defining the C{Dict}, C{ZeroOrMore}, and C{Group} tokens + in the proper order. The key pattern can include delimiting markers or punctuation, + as long as they are suppressed, thereby leaving the significant key text. The value + pattern can include named results, so that the C{Dict} results can include named token + fields. + """ + return Dict( ZeroOrMore( Group ( key + value ) ) ) + +def originalTextFor(expr, asString=True): + """Helper to return the original, untokenized text for a given expression. Useful to + restore the parsed fields of an HTML start tag into the raw tag text itself, or to + revert separate tokens with intervening whitespace back to the original matching + input text. Simpler to use than the parse action C{L{keepOriginalText}}, and does not + require the inspect module to chase up the call stack. By default, returns a + string containing the original parsed text. + + If the optional C{asString} argument is passed as C{False}, then the return value is a + C{ParseResults} containing any results names that were originally matched, and a + single token containing the original matched text from the input string. 
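A brief sketch of the oneOf and dictOf helpers in use, assuming the module imports as pyparsing:

from pyparsing import Suppress, Word, alphas, nums, dictOf, oneOf

unit = oneOf("mm cm m km")                 # optimized alternation of literal strings
print(unit.parseString("cm"))              # ['cm']

attrs = dictOf(Word(alphas) + Suppress("="), Word(nums))
result = attrs.parseString("width=120 height=80")
print(result["width"])                     # 120
print(result["height"])                    # 80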
So if + the expression passed to C{L{originalTextFor}} contains expressions with defined + results names, you must set C{asString} to C{False} if you want to preserve those + results name values.""" + locMarker = Empty().setParseAction(lambda s,loc,t: loc) + endlocMarker = locMarker.copy() + endlocMarker.callPreparse = False + matchExpr = locMarker("_original_start") + expr + endlocMarker("_original_end") + if asString: + extractText = lambda s,l,t: s[t._original_start:t._original_end] + else: + def extractText(s,l,t): + del t[:] + t.insert(0, s[t._original_start:t._original_end]) + del t["_original_start"] + del t["_original_end"] + matchExpr.setParseAction(extractText) + return matchExpr + +# convenience constants for positional expressions +empty = Empty().setName("empty") +lineStart = LineStart().setName("lineStart") +lineEnd = LineEnd().setName("lineEnd") +stringStart = StringStart().setName("stringStart") +stringEnd = StringEnd().setName("stringEnd") + +_escapedPunc = Word( _bslash, r"\[]-*.$+^?()~ ", exact=2 ).setParseAction(lambda s,l,t:t[0][1]) +_printables_less_backslash = "".join([ c for c in printables if c not in r"\]" ]) +_escapedHexChar = Regex(r"\\0?[xX][0-9a-fA-F]+").setParseAction(lambda s,l,t:unichr(int(t[0][1:],16))) +_escapedOctChar = Regex(r"\\0[0-7]+").setParseAction(lambda s,l,t:unichr(int(t[0][1:],8))) +_singleChar = _escapedPunc | _escapedHexChar | _escapedOctChar | Word(_printables_less_backslash,exact=1) +_charRange = Group(_singleChar + Suppress("-") + _singleChar) +_reBracketExpr = Literal("[") + Optional("^").setResultsName("negate") + Group( OneOrMore( _charRange | _singleChar ) ).setResultsName("body") + "]" + +_expanded = lambda p: (isinstance(p,ParseResults) and ''.join([ unichr(c) for c in range(ord(p[0]),ord(p[1])+1) ]) or p) + +def srange(s): + r"""Helper to easily define string ranges for use in Word construction. Borrows + syntax from regexp '[]' string range definitions:: + srange("[0-9]") -> "0123456789" + srange("[a-z]") -> "abcdefghijklmnopqrstuvwxyz" + srange("[a-z$_]") -> "abcdefghijklmnopqrstuvwxyz$_" + The input string must be enclosed in []'s, and the returned string is the expanded + character set joined into a single string. + The values enclosed in the []'s may be:: + a single character + an escaped character with a leading backslash (such as \- or \]) + an escaped hex character with a leading '\x' (\x21, which is a '!' character) + (\0x## is also supported for backwards compatibility) + an escaped octal character with a leading '\0' (\041, which is a '!' character) + a range of any of the above, separated by a dash ('a-z', etc.) + any combination of the above ('aeiouy', 'a-zA-Z0-9_$', etc.) + """ + try: + return "".join([_expanded(part) for part in _reBracketExpr.parseString(s).body]) + except: + return "" + +def matchOnlyAtCol(n): + """Helper method for defining parse actions that require matching at a specific + column in the input text. + """ + def verifyCol(strg,locn,toks): + if col(locn,strg) != n: + raise ParseException(strg,locn,"matched token not at column %d" % n) + return verifyCol + +def replaceWith(replStr): + """Helper method for common parse actions that simply return a literal value. Especially + useful when used with C{transformString()}. + """ + def _replFunc(*args): + return [replStr] + return _replFunc + +def removeQuotes(s,l,t): + """Helper parse action for removing quotation marks from parsed quoted strings. 
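A short sketch of srange and replaceWith used with transformString, assuming the module imports as pyparsing:

from pyparsing import Word, replaceWith, srange

hexchars = srange("[0-9a-fA-F]")
print(hexchars)                            # 0123456789abcdefABCDEF
color = Word("#", hexchars, exact=7)
print(color.setParseAction(replaceWith("COLOR")).transformString("use #1a2b3c here"))
# -> use COLOR here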
+ To use, add this parse action to quoted string using:: + quotedString.setParseAction( removeQuotes ) + """ + return t[0][1:-1] + +def upcaseTokens(s,l,t): + """Helper parse action to convert tokens to upper case.""" + return [ tt.upper() for tt in map(_ustr,t) ] + +def downcaseTokens(s,l,t): + """Helper parse action to convert tokens to lower case.""" + return [ tt.lower() for tt in map(_ustr,t) ] + +def keepOriginalText(s,startLoc,t): + """DEPRECATED - use new helper method C{originalTextFor}. + Helper parse action to preserve original parsed text, + overriding any nested parse actions.""" + try: + endloc = getTokensEndLoc() + except ParseException: + raise ParseFatalException("incorrect usage of keepOriginalText - may only be called as a parse action") + del t[:] + t += ParseResults(s[startLoc:endloc]) + return t + +def getTokensEndLoc(): + """Method to be called from within a parse action to determine the end + location of the parsed tokens.""" + import inspect + fstack = inspect.stack() + try: + # search up the stack (through intervening argument normalizers) for correct calling routine + for f in fstack[2:]: + if f[3] == "_parseNoCache": + endloc = f[0].f_locals["loc"] + return endloc + else: + raise ParseFatalException("incorrect usage of getTokensEndLoc - may only be called from within a parse action") + finally: + del fstack + +def _makeTags(tagStr, xml): + """Internal helper to construct opening and closing tag expressions, given a tag name""" + if isinstance(tagStr,basestring): + resname = tagStr + tagStr = Keyword(tagStr, caseless=not xml) + else: + resname = tagStr.name + + tagAttrName = Word(alphas,alphanums+"_-:") + if (xml): + tagAttrValue = dblQuotedString.copy().setParseAction( removeQuotes ) + openTag = Suppress("<") + tagStr("tag") + \ + Dict(ZeroOrMore(Group( tagAttrName + Suppress("=") + tagAttrValue ))) + \ + Optional("/",default=[False]).setResultsName("empty").setParseAction(lambda s,l,t:t[0]=='/') + Suppress(">") + else: + printablesLessRAbrack = "".join( [ c for c in printables if c not in ">" ] ) + tagAttrValue = quotedString.copy().setParseAction( removeQuotes ) | Word(printablesLessRAbrack) + openTag = Suppress("<") + tagStr("tag") + \ + Dict(ZeroOrMore(Group( tagAttrName.setParseAction(downcaseTokens) + \ + Optional( Suppress("=") + tagAttrValue ) ))) + \ + Optional("/",default=[False]).setResultsName("empty").setParseAction(lambda s,l,t:t[0]=='/') + Suppress(">") + closeTag = Combine(_L("</") + tagStr + ">") + + openTag = openTag.setResultsName("start"+"".join(resname.replace(":"," ").title().split())).setName("<%s>" % tagStr) + closeTag = closeTag.setResultsName("end"+"".join(resname.replace(":"," ").title().split())).setName("</%s>" % tagStr) + openTag.tag = resname + closeTag.tag = resname + return openTag, closeTag + +def makeHTMLTags(tagStr): + """Helper to construct opening and closing tag expressions for HTML, given a tag name""" + return _makeTags( tagStr, False ) + +def makeXMLTags(tagStr): + """Helper to construct opening and closing tag expressions for XML, given a tag name""" + return _makeTags( tagStr, True ) + +def withAttribute(*args,**attrDict): + """Helper to create a validating parse action to be used with start tags created + with C{makeXMLTags} or C{makeHTMLTags}. Use C{withAttribute} to qualify a starting tag + with a required attribute value, to avoid false matches on common tags such as + C{<TD>} or C{<DIV>
}. + + Call C{withAttribute} with a series of attribute names and values. Specify the list + of filter attributes names and values as: + - keyword arguments, as in C{(align="right")}, or + - as an explicit dict with C{**} operator, when an attribute name is also a Python + reserved word, as in C{**{"class":"Customer", "align":"right"}} + - a list of name-value tuples, as in ( ("ns1:class", "Customer"), ("ns2:align","right") ) + For attribute names with a namespace prefix, you must use the second form. Attribute + names are matched insensitive to upper/lower case. + + To verify that the attribute exists, but without specifying a value, pass + C{withAttribute.ANY_VALUE} as the value. + """ + if args: + attrs = args[:] + else: + attrs = attrDict.items() + attrs = [(k,v) for k,v in attrs] + def pa(s,l,tokens): + for attrName,attrValue in attrs: + if attrName not in tokens: + raise ParseException(s,l,"no matching attribute " + attrName) + if attrValue != withAttribute.ANY_VALUE and tokens[attrName] != attrValue: + raise ParseException(s,l,"attribute '%s' has value '%s', must be '%s'" % + (attrName, tokens[attrName], attrValue)) + return pa +withAttribute.ANY_VALUE = object() + +opAssoc = _Constants() +opAssoc.LEFT = object() +opAssoc.RIGHT = object() + +def operatorPrecedence( baseExpr, opList ): + """Helper method for constructing grammars of expressions made up of + operators working in a precedence hierarchy. Operators may be unary or + binary, left- or right-associative. Parse actions can also be attached + to operator expressions. + + Parameters: + - baseExpr - expression representing the most basic element for the nested + - opList - list of tuples, one for each operator precedence level in the + expression grammar; each tuple is of the form + (opExpr, numTerms, rightLeftAssoc, parseAction), where: + - opExpr is the pyparsing expression for the operator; + may also be a string, which will be converted to a Literal; + if numTerms is 3, opExpr is a tuple of two expressions, for the + two operators separating the 3 terms + - numTerms is the number of terms for this operator (must + be 1, 2, or 3) + - rightLeftAssoc is the indicator whether the operator is + right or left associative, using the pyparsing-defined + constants opAssoc.RIGHT and opAssoc.LEFT. 
+ - parseAction is the parse action to be associated with + expressions matching this operator expression (the + parse action tuple member may be omitted) + """ + ret = Forward() + lastExpr = baseExpr | ( Suppress('(') + ret + Suppress(')') ) + for i,operDef in enumerate(opList): + opExpr,arity,rightLeftAssoc,pa = (operDef + (None,))[:4] + if arity == 3: + if opExpr is None or len(opExpr) != 2: + raise ValueError("if numterms=3, opExpr must be a tuple or list of two expressions") + opExpr1, opExpr2 = opExpr + thisExpr = Forward()#.setName("expr%d" % i) + if rightLeftAssoc == opAssoc.LEFT: + if arity == 1: + matchExpr = FollowedBy(lastExpr + opExpr) + Group( lastExpr + OneOrMore( opExpr ) ) + elif arity == 2: + if opExpr is not None: + matchExpr = FollowedBy(lastExpr + opExpr + lastExpr) + Group( lastExpr + OneOrMore( opExpr + lastExpr ) ) + else: + matchExpr = FollowedBy(lastExpr+lastExpr) + Group( lastExpr + OneOrMore(lastExpr) ) + elif arity == 3: + matchExpr = FollowedBy(lastExpr + opExpr1 + lastExpr + opExpr2 + lastExpr) + \ + Group( lastExpr + opExpr1 + lastExpr + opExpr2 + lastExpr ) + else: + raise ValueError("operator must be unary (1), binary (2), or ternary (3)") + elif rightLeftAssoc == opAssoc.RIGHT: + if arity == 1: + # try to avoid LR with this extra test + if not isinstance(opExpr, Optional): + opExpr = Optional(opExpr) + matchExpr = FollowedBy(opExpr.expr + thisExpr) + Group( opExpr + thisExpr ) + elif arity == 2: + if opExpr is not None: + matchExpr = FollowedBy(lastExpr + opExpr + thisExpr) + Group( lastExpr + OneOrMore( opExpr + thisExpr ) ) + else: + matchExpr = FollowedBy(lastExpr + thisExpr) + Group( lastExpr + OneOrMore( thisExpr ) ) + elif arity == 3: + matchExpr = FollowedBy(lastExpr + opExpr1 + thisExpr + opExpr2 + thisExpr) + \ + Group( lastExpr + opExpr1 + thisExpr + opExpr2 + thisExpr ) + else: + raise ValueError("operator must be unary (1), binary (2), or ternary (3)") + else: + raise ValueError("operator must indicate right or left associativity") + if pa: + matchExpr.setParseAction( pa ) + thisExpr << ( matchExpr | lastExpr ) + lastExpr = thisExpr + ret << lastExpr + return ret + +dblQuotedString = Regex(r'"(?:[^"\n\r\\]|(?:"")|(?:\\x[0-9a-fA-F]+)|(?:\\.))*"').setName("string enclosed in double quotes") +sglQuotedString = Regex(r"'(?:[^'\n\r\\]|(?:'')|(?:\\x[0-9a-fA-F]+)|(?:\\.))*'").setName("string enclosed in single quotes") +quotedString = Regex(r'''(?:"(?:[^"\n\r\\]|(?:"")|(?:\\x[0-9a-fA-F]+)|(?:\\.))*")|(?:'(?:[^'\n\r\\]|(?:'')|(?:\\x[0-9a-fA-F]+)|(?:\\.))*')''').setName("quotedString using single or double quotes") +unicodeString = Combine(_L('u') + quotedString.copy()) + +def nestedExpr(opener="(", closer=")", content=None, ignoreExpr=quotedString.copy()): + """Helper method for defining nested lists enclosed in opening and closing + delimiters ("(" and ")" are the default). + + Parameters: + - opener - opening character for a nested list (default="("); can also be a pyparsing expression + - closer - closing character for a nested list (default=")"); can also be a pyparsing expression + - content - expression for items within the nested lists (default=None) + - ignoreExpr - expression for ignoring opening and closing delimiters (default=quotedString) + + If an expression is not provided for the content argument, the nested + expression will capture all whitespace-delimited content between delimiters + as a list of separate values. 
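+
+       For example, with the default arguments (an illustrative sketch)::
+           nestedExpr().parseString("(a (b c) d)").asList()  ->  [['a', ['b', 'c'], 'd']]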
+ + Use the C{ignoreExpr} argument to define expressions that may contain + opening or closing characters that should not be treated as opening + or closing characters for nesting, such as quotedString or a comment + expression. Specify multiple expressions using an C{L{Or}} or C{L{MatchFirst}}. + The default is L{quotedString}, but if no expressions are to be ignored, + then pass C{None} for this argument. + """ + if opener == closer: + raise ValueError("opening and closing strings cannot be the same") + if content is None: + if isinstance(opener,basestring) and isinstance(closer,basestring): + if len(opener) == 1 and len(closer)==1: + if ignoreExpr is not None: + content = (Combine(OneOrMore(~ignoreExpr + + CharsNotIn(opener+closer+ParserElement.DEFAULT_WHITE_CHARS,exact=1)) + ).setParseAction(lambda t:t[0].strip())) + else: + content = (empty.copy()+CharsNotIn(opener+closer+ParserElement.DEFAULT_WHITE_CHARS + ).setParseAction(lambda t:t[0].strip())) + else: + if ignoreExpr is not None: + content = (Combine(OneOrMore(~ignoreExpr + + ~Literal(opener) + ~Literal(closer) + + CharsNotIn(ParserElement.DEFAULT_WHITE_CHARS,exact=1)) + ).setParseAction(lambda t:t[0].strip())) + else: + content = (Combine(OneOrMore(~Literal(opener) + ~Literal(closer) + + CharsNotIn(ParserElement.DEFAULT_WHITE_CHARS,exact=1)) + ).setParseAction(lambda t:t[0].strip())) + else: + raise ValueError("opening and closing arguments must be strings if no content expression is given") + ret = Forward() + if ignoreExpr is not None: + ret << Group( Suppress(opener) + ZeroOrMore( ignoreExpr | ret | content ) + Suppress(closer) ) + else: + ret << Group( Suppress(opener) + ZeroOrMore( ret | content ) + Suppress(closer) ) + return ret + +def indentedBlock(blockStatementExpr, indentStack, indent=True): + """Helper method for defining space-delimited indentation blocks, such as + those used to define block statements in Python source code. + + Parameters: + - blockStatementExpr - expression defining syntax of statement that + is repeated within the indented block + - indentStack - list created by caller to manage indentation stack + (multiple statementWithIndentedBlock expressions within a single grammar + should share a common indentStack) + - indent - boolean indicating whether block must be indented beyond the + the current level; set to False for block of left-most statements + (default=True) + + A valid block must contain at least one C{blockStatement}. 
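+
+       A typical setup (an illustrative sketch; C{stmt} is whatever statement
+       expression the caller defines)::
+           indentStack = [1]
+           stmt = Forward()
+           suite = indentedBlock(stmt, indentStack)
+           # stmt is then defined in terms of 'suite' to allow nested blocks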
+ """ + def checkPeerIndent(s,l,t): + if l >= len(s): return + curCol = col(l,s) + if curCol != indentStack[-1]: + if curCol > indentStack[-1]: + raise ParseFatalException(s,l,"illegal nesting") + raise ParseException(s,l,"not a peer entry") + + def checkSubIndent(s,l,t): + curCol = col(l,s) + if curCol > indentStack[-1]: + indentStack.append( curCol ) + else: + raise ParseException(s,l,"not a subentry") + + def checkUnindent(s,l,t): + if l >= len(s): return + curCol = col(l,s) + if not(indentStack and curCol < indentStack[-1] and curCol <= indentStack[-2]): + raise ParseException(s,l,"not an unindent") + indentStack.pop() + + NL = OneOrMore(LineEnd().setWhitespaceChars("\t ").suppress()) + INDENT = Empty() + Empty().setParseAction(checkSubIndent) + PEER = Empty().setParseAction(checkPeerIndent) + UNDENT = Empty().setParseAction(checkUnindent) + if indent: + smExpr = Group( Optional(NL) + + #~ FollowedBy(blockStatementExpr) + + INDENT + (OneOrMore( PEER + Group(blockStatementExpr) + Optional(NL) )) + UNDENT) + else: + smExpr = Group( Optional(NL) + + (OneOrMore( PEER + Group(blockStatementExpr) + Optional(NL) )) ) + blockStatementExpr.ignore(_bslash + LineEnd()) + return smExpr + +alphas8bit = srange(r"[\0xc0-\0xd6\0xd8-\0xf6\0xf8-\0xff]") +punc8bit = srange(r"[\0xa1-\0xbf\0xd7\0xf7]") + +anyOpenTag,anyCloseTag = makeHTMLTags(Word(alphas,alphanums+"_:")) +commonHTMLEntity = Combine(_L("&") + oneOf("gt lt amp nbsp quot").setResultsName("entity") +";").streamline() +_htmlEntityMap = dict(zip("gt lt amp nbsp quot".split(),'><& "')) +replaceHTMLEntity = lambda t : t.entity in _htmlEntityMap and _htmlEntityMap[t.entity] or None + +# it's easy to get these comment structures wrong - they're very common, so may as well make them available +cStyleComment = Regex(r"/\*(?:[^*]*\*+)+?/").setName("C style comment") + +htmlComment = Regex(r"") +restOfLine = Regex(r".*").leaveWhitespace() +dblSlashComment = Regex(r"\/\/(\\\n|.)*").setName("// comment") +cppStyleComment = Regex(r"/(?:\*(?:[^*]*\*+)+?/|/[^\n]*(?:\n[^\n]*)*?(?:(?" 
+ str(tokenlist)) + print ("tokens = " + str(tokens)) + print ("tokens.columns = " + str(tokens.columns)) + print ("tokens.tables = " + str(tokens.tables)) + print (tokens.asXML("SQL",True)) + except ParseBaseException: + err = sys.exc_info()[1] + print (teststring + "->") + print (err.line) + print (" "*(err.column-1) + "^") + print (err) + print() + + selectToken = CaselessLiteral( "select" ) + fromToken = CaselessLiteral( "from" ) + + ident = Word( alphas, alphanums + "_$" ) + columnName = delimitedList( ident, ".", combine=True ).setParseAction( upcaseTokens ) + columnNameList = Group( delimitedList( columnName ) )#.setName("columns") + tableName = delimitedList( ident, ".", combine=True ).setParseAction( upcaseTokens ) + tableNameList = Group( delimitedList( tableName ) )#.setName("tables") + simpleSQL = ( selectToken + \ + ( '*' | columnNameList ).setResultsName( "columns" ) + \ + fromToken + \ + tableNameList.setResultsName( "tables" ) ) + + test( "SELECT * from XYZZY, ABC" ) + test( "select * from SYS.XYZZY" ) + test( "Select A from Sys.dual" ) + test( "Select AA,BB,CC from Sys.dual" ) + test( "Select A, B, C from Sys.dual" ) + test( "Select A, B, C from Sys.dual" ) + test( "Xelect A, B, C from Sys.dual" ) + test( "Select A, B, C frox Sys.dual" ) + test( "Select" ) + test( "Select ^^^ frox Sys.dual" ) + test( "Select A, B, C from Sys.dual, Table2 " ) diff --git a/scripts/pyclibrary/CLibrary.py b/scripts/pyclibrary/CLibrary.py new file mode 100644 index 000000000..17424bc4e --- /dev/null +++ b/scripts/pyclibrary/CLibrary.py @@ -0,0 +1,501 @@ +# -*- coding: utf-8 -*- +""" +CLibrary.py - Provides CLibrary class +Copyright 2010 Luke Campagnola +Distributed under MIT/X11 license. See license.txt for more infomation. + +Proxy to both CHeader and ctypes, allowing automatic type conversion and +function calling based on C header definitions. +""" + + +from ctypes import * +import sys + + +class CLibrary: + """The CLibrary class is intended to automate much of the work in using ctypes by integrating + header file definitions from CParser. Ths class serves as a proxy to a ctypes, adding + a few features: + - allows easy access to values defined via CParser + - automatic type conversions for function calls using CParser function signatures + - creates ctype classes based on type definitions from CParser + + Initialize using a ctypes shared object and a CParser: + headers = CParser.winDefs() + lib = CLibrary(windll.User32, headers) + + There are 3 ways to access library elements: + lib(type, name) - type can be one of 'values', 'functions', 'types', 'structs', 'unions', or 'enums'. + Returns an object matching name. For values, the value from the headers is + returned. For functions, a callable object is returned that handles automatic + type conversion for arguments and return values. for structs, types, and enums, + a ctypes class is returned matching the type specified. + lib.name - searches in order through values, functions, types, structs, unions, and enums from + header definitions and returns an object for the first match found. The object + returned is the same as returned by lib(type, name). This is the preferred way to access + elements from CLibrary, but may not work in some situations (for example, if + a struct and variable share the same name). + lib[type] - Accesses the header definitions directly, returns definition dictionaries + based on the type requested. This is equivalent to headers.defs[type]. 
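+
+    A short access sketch (illustrative only; 'libfoo' and its header are
+    hypothetical, not part of this module):
+        p = CParser('foo.h')
+        lib = CLibrary(cdll.LoadLibrary('libfoo.so'), p)
+        maxLen = lib('values', 'FOO_MAX_LEN')  # macro/constant from the header
+        res = lib.foo_init()                   # wrapped call; returns a CallResult
+        print res()                            # the plain return value
+        fnDefs = lib['functions']              # raw definition dictionary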
+ """ + Null = object() + + cTypes = { + 'char': c_char, + 'wchar': c_wchar, + 'unsigned char': c_ubyte, + 'short': c_short, + 'short int': c_short, + 'unsigned short': c_ushort, + 'unsigned short int': c_ushort, + 'int': c_int, + 'unsigned': c_uint, + 'unsigned int': c_uint, + 'long': c_long, + 'long int': c_long, + 'unsigned long': c_ulong, + 'unsigned long int': c_ulong, + '__int64': c_longlong, + 'long long': c_longlong, + 'long long int': c_longlong, + 'unsigned __int64': c_ulonglong, + 'unsigned long long': c_ulonglong, + 'unsigned long long int': c_ulonglong, + 'float': c_float, + 'double': c_double, + 'long double': c_longdouble + } + cPtrTypes = { + 'char': c_char_p, + 'wchar': c_wchar_p, + 'void': c_void_p + } + + + + def __init__(self, lib, headers, prefix=None): + ## name everything using underscores to avoid name collisions with library + + self._lib_ = lib + self._headers_ = headers + self._defs_ = headers.defs + if prefix is None: + self._prefix_ = [] + elif type(prefix) is list: + self._prefix_ = prefix + else: + self._prefix_ = [prefix] + self._objs_ = {} + for k in ['values', 'functions', 'types', 'structs', 'unions', 'enums']: + self._objs_[k] = {} + self._allObjs_ = {} + self._structs_ = {} + self._unions_ = {} + + def __call__(self, typ, name): + if typ not in self._objs_: + typs = self._objs_.keys() + raise Exception("Type must be one of %s" % str(typs)) + + if name not in self._objs_[typ]: + self._objs_[typ][name] = self._mkObj_(typ, name) + + return self._objs_[typ][name] + + def _allNames_(self, name): + return [name] + [p + name for p in self._prefix_] + + def _mkObj_(self, typ, name): + names = self._allNames_(name) + + for n in names: + if n in self._objs_: + return self._objs_[n] + + for n in names: ## try with and without prefix + if n not in self._defs_[typ] and not (typ in ['structs', 'unions', 'enums'] and n in self._defs_['types']): + continue + + if typ == 'values': + return self._defs_[typ][n] + elif typ == 'functions': + return self._getFunction(n) + elif typ == 'types': + obj = self._defs_[typ][n] + return self._ctype(obj) + elif typ == 'structs': + return self._cstruct('structs', n) + elif typ == 'unions': + return self._cstruct('unions', n) + elif typ == 'enums': + ## Allow automatic resolving of typedefs that alias enums + if n not in self._defs_['enums']: + if n not in self._defs_['types']: + raise Exception('No enums named "%s"' % n) + typ = self._headers_.evalType([n])[0] + if typ[:5] != 'enum ': + raise Exception('No enums named "%s"' % n) + n = self._defs_['types'][typ][1] ## look up internal name of enum + obj = self._defs_['enums'][n] + + return obj + else: + raise Exception("Unknown type %s" % typ) + raise NameError(name) + + + def __getattr__(self, name): + """Used to retrieve any type of definition from the headers. 
Searches for the name in this order: + values, functions, types, structs, unions, enums.""" + if name not in self._allObjs_: + names = self._allNames_(name) + for k in ['values', 'functions', 'types', 'structs', 'unions', 'enums', None]: + if k is None: + raise NameError(name) + obj = None + for n in names: + if n in self._defs_[k]: + obj = self(k, n) + break + if obj is not None: + break + self._allObjs_[name] = obj + return self._allObjs_[name] + + def __getitem__(self, name): + """Used to retrieve a specific dictionary from the headers.""" + return self._defs_[name] + + def __repr__(self): + return "" % str(self._lib_) + + def _getFunction(self, funcName): + try: + func = getattr(self._lib_, funcName) + except: + raise Exception("Function name '%s' appears in headers but not in library!" % func) + + #print "create function %s," % (funcName), self._defs_['functions'][funcName] + return CFunction(self, func, self._defs_['functions'][funcName], funcName) + + def _ctype(self, typ, pointers=True): + """return a ctype object representing the named type. + If pointers is True, the class returned includes all pointer/array specs provided. + Otherwise, the class returned is just the base type with no pointers.""" + try: + typ = self._headers_.evalType(typ) + mods = typ[1:][:] + + ## Create the initial type + ## Some types like ['char', '*'] have a specific ctype (c_char_p) + ## (but only do this if pointers == True) + if pointers and len(typ) > 1 and typ[1] == '*' and typ[0] in CLibrary.cPtrTypes: + cls = CLibrary.cPtrTypes[typ[0]] + mods = typ[2:] + + ## If the base type is in the list of existing ctypes: + elif typ[0] in CLibrary.cTypes: + cls = CLibrary.cTypes[typ[0]] + + ## structs, unions, enums: + elif typ[0][:7] == 'struct ': + cls = self._cstruct('structs', self._defs_['types'][typ[0]][1]) + elif typ[0][:6] == 'union ': + cls = self._cstruct('unions', self._defs_['types'][typ[0]][1]) + elif typ[0][:5] == 'enum ': + cls = c_int + + ## void + elif typ[0] == 'void': + cls = None + else: + #print typ + raise Exception("Can't find base type for %s" % str(typ)) + + if not pointers: + return cls + + ## apply pointers and arrays + while len(mods) > 0: + m = mods.pop(0) + if isinstance(m, basestring): ## pointer or reference + if m[0] == '*' or m[0] == '&': + for i in m: + cls = POINTER(cls) + elif type(m) is list: ## array + for i in m: + if i == -1: ## -1 indicates an 'incomplete type' like "int variable[]" + cls = POINTER(cls) ## which we should interpret like "int *variable" + else: + cls = cls * i + elif type(m) is tuple: ## Probably a function pointer + ## Find pointer and calling convention + isPtr = False + conv = '__cdecl' + if len(mods) == 0: + raise Exception("Function signature with no pointer:", m, mods) + for i in [0,1]: + if len(mods) < 1: + break + if mods[0] == '*': + mods.pop(0) + isPtr = True + elif mods[0] in ['__stdcall', '__cdecl']: + conv = mods.pop(0) + else: + break + if not isPtr: + raise Exception("Not sure how to handle type (function without single pointer): %s" % str(typ)) + + if conv == '__stdcall': + mkfn = WINFUNCTYPE + else: + mkfn = CFUNCTYPE + #print "Create function pointer (%s)" % conv + + args = [self._ctype(arg[1]) for arg in m] + cls = mkfn(cls, *args) + + else: + raise Exception("Not sure what to do with this type modifier: '%s'" % str(p)) + return cls + except: + print "Error while processing type", typ + raise + + def _cstruct(self, strType, strName): + if strName not in self._structs_: + + ## Resolve struct name--typedef aliases allowed. 
+ if strName not in self._defs_[strType]: + if strName not in self._defs_['types']: + raise Exception('No struct/union named "%s"' % strName) + typ = self._headers_.evalType([strName])[0] + if typ[:7] != 'struct ' and typ[:6] != 'union ': + raise Exception('No struct/union named "%s"' % strName) + strName = self._defs_['types'][typ][1] + + ## Pull struct definition + defn = self._defs_[strType][strName] + + + ## create ctypes class + defs = defn['members'][:] + if strType == 'structs': + class s(Structure): + def __repr__(self): + return "" % strName + elif strType == 'unions': + class s(Union): + def __repr__(self): + return "" % strName + + + ## must register struct here to allow recursive definitions. + self._structs_[strName] = s + + if defn['pack'] is not None: + s._pack_ = defn['pack'] + + ## assign names to anonymous members + members = [] + anon = [] + for i in range(len(defs)): + if defs[i][0] is None: + c = 0 + while True: + name = 'anon_member%d' % c + if name not in members: + defs[i][0] = name + anon.append(name) + break + members.append(defs[i][0]) + + s._anonymous_ = anon + s._fields_ = [(m[0], self._ctype(m[1])) for m in defs] + s._defaults_ = [m[2] for m in defs] + return self._structs_[strName] + + + +class CFunction: + def __init__(self, lib, func, sig, name): + self.lib = lib + self.func = func + #print sig + self.sig = list(sig) # looks like [return_type, [(argName, type, default), (argName, type, default), ...]] + self.sig[1] = [s for s in sig[1] if s[1] != ['void']] ## remove void args from list + for conv in ['__stdcall', '__cdecl']: + if conv in self.sig[0]: + self.sig[0].remove(conv) + self.name = name + self.restype = lib._ctype(self.sig[0]) + #func.restype = self.restype + self.argTypes = [lib._ctype(s[1]) for s in self.sig[1]] + func.argtypes = self.argTypes + self.reqArgs = [x[0] for x in self.sig[1] if x[2] is None] + self.argInds = dict([(self.sig[1][i][0], i) for i in range(len(self.sig[1]))]) ## mapping from argument names to indices + #print "created func", self, sig, self.argTypes + + def argCType(self, arg): + """Return the ctype required for the specified argument. + arg can be either an integer or the name of the argument. + """ + if isinstance(arg, basestring): + arg = self.argInds[arg] + return self.lib._ctype(self.sig[1][arg][1]) + + def __call__(self, *args, **kwargs): + """Invoke the SO or dll function referenced, converting all arguments to the correct type. + Keyword arguments are allowed as long as the header specifies the argument names. + Arguments which are passed byref may be omitted entirely, and will be automaticaly generated. + To pass a NULL pointer, give None as the argument. + Returns the return value of the function call as well as all of the arguments (so that objects passed by reference can be retrieved)""" + #print "CALL: %s(%s)" % (self.name, ", ".join(map(str, args) + ["%s=%s" % (k, str(kwargs[k])) for k in kwargs])) + #print " sig:", self.sig + argList = [None] * max(len(self.reqArgs), len(args)) ## We'll need at least this many arguments. 
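+        ## (Fill strategy for the code below: positional args first, then keyword
+        ## args by name; slots still empty afterwards are auto-generated -- an
+        ## omitted argument becomes a pointer to a zero-initialized value, an
+        ## explicit None becomes a NULL pointer -- and their indices are recorded
+        ## in guessedArgs.)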
+ + ## First fill in args + for i in range(len(args)): + #argList[i] = self.argTypes[i](args[i]) + if args[i] is None: + argList[i] = self.lib.Null + else: + argList[i] = args[i] + + ## Next fill in kwargs + for k in kwargs: + #print " kw:", k + if k not in self.argInds: + print "Function signature:", self.prettySignature() + raise Exception("Function signature has no argument named '%s'" % k) + ind = self.argInds[k] + if ind >= len(argList): ## stretch argument list if needed + argList += [None] * (ind - len(argList) + 1) + #argList[ind] = self.coerce(kwargs[k], self.argTypes[ind]) + if kwargs[k] is None: + argList[ind] = self.lib.Null + else: + argList[ind] = kwargs[k] + + guessedArgs = [] + ## Finally, fill in remaining arguments if they are pointers to int/float/void*/struct values + ## (we assume these are to be modified by the function and their initial value is not important) + for i in range(len(argList)): + if argList[i] is None or argList[i] is self.lib.Null: + try: + sig = self.sig[1][i][1] + argType = self.lib._headers_.evalType(sig) + if argList[i] is self.lib.Null: ## request to build a null pointer + if len(argType) < 2: + raise Exception("Can not create NULL for non-pointer argument type: %s" % str(argType)) + argList[i] = self.lib._ctype(sig)() + #elif argType == ['char', '*']: ## pass null pointer if none was specified. This is a little dangerous, but some functions will expect it. + #argList[i] = c_char_p() ## On second thought: let's just require the user to explicitly ask for a NULL pointer. + else: + if argType == ['void', '**'] or argType == ['void', '*', '*']: + cls = c_void_p + else: + assert len(argType) == 2 and argType[1] == '*' ## Must be 2-part type, second part must be '*' + cls = self.lib._ctype(sig, pointers=False) + argList[i] = pointer(cls(0)) + guessedArgs.append(i) + except: + if sys.exc_info()[0] is not AssertionError: + raise + #sys.excepthook(*sys.exc_info()) + print "Function signature:", self.prettySignature() + raise Exception("Function call '%s' missing required argument %d '%s'. (See above for signature)" % (self.name, i, self.sig[1][i][0])) + #print " args:", argList + try: + res = self.func(*argList) + except: + print "Function call failed. Signature is:", self.prettySignature() + print "Arguments:", argList + print "Argtypes:", self.func.argtypes + raise + #print " result:", res + + cr = CallResult(res, argList, self.sig, guessed=guessedArgs) + return cr + + def prettySignature(self): + return "%s %s(%s)" % (''.join(self.sig[0]), self.name, ', '.join(["%s %s" % ("".join(map(str, s[1])), s[0]) for s in self.sig[1]])) + +class CallResult: + """Class for bundling results from C function calls. Allows access to the function + return value as well as all of the arguments, since the function call will often return + extra values via these arguments. + - Original ctype objects can be accessed via result.rval or result.args + - Python values carried by these objects can be accessed using () + To access values: + - The return value: () + - The nth argument passed: [n] + - The argument by name: ['name'] + - All values that were auto-generated: .auto() + + The class can also be used as an iterator, so that tuple unpacking is possible: + ret, arg1, arg2 = lib.runSomeFunction(...) 
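+
+    A short sketch (illustrative; someFunc and its 'outLen' argument are hypothetical):
+        res = lib.someFunc(buf)     # trailing byref 'outLen' omitted, auto-generated
+        rval = res()                # plain return value
+        outLen = res['outLen']      # retrieve an output argument by name
+        generated = res.auto()      # values of all auto-generated arguments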
+ """ + def __init__(self, rval, args, sig, guessed): + self.rval = rval ## return value of function call + self.args = args ## list of arguments to function call + self.sig = sig ## function signature + self.guessed = guessed ## list of arguments that were generated automatically (usually byrefs) + + def __call__(self): + #print "Clibrary:", type(self.rval), self.mkVal(self.rval) + if self.sig[0] == ['void']: + return None + return self.mkVal(self.rval) + + def __getitem__(self, n): + if type(n) is int: + return self.mkVal(self.args[n]) + elif type(n) is str: + ind = self.findArg(n) + return self.mkVal(self.args[ind]) + else: + raise Exception("Index must be int or str.") + + def __setitem__(self, n, val): + if type(n) is int: + self.args[n] = val + elif type(n) is str: + ind = self.findArg(n) + self.args[ind] = val + else: + raise Exception("Index must be int or str.") + + + def mkVal(self, obj): + while not hasattr(obj, 'value'): + if not hasattr(obj, 'contents'): + return obj + try: + obj = obj.contents + except ValueError: + return None + + return obj.value + + + def findArg(self, arg): + for i in range(len(self.sig[1])): + if self.sig[1][i][0] == arg: + return i + raise Exception("Can't find argument '%s' in function signature. Arguments are: %s" % (arg, str([a[0] for a in self.sig[1]]))) + + def __iter__(self): + yield self() + for i in range(len(self.args)): + yield(self[i]) + + def auto(self): + return [self[n] for n in self.guessed] + + + + + diff --git a/scripts/pyclibrary/CParser.py b/scripts/pyclibrary/CParser.py new file mode 100644 index 000000000..df2b4dea6 --- /dev/null +++ b/scripts/pyclibrary/CParser.py @@ -0,0 +1,1274 @@ +# -*- coding: utf-8 -*- +""" +CParser.py - C parsing library +Copyright 2010 Luke Campagnola +Distributed under MIT/X11 license. See license.txt for more infomation. + +Used for extracting data such as macro definitions, variables, typedefs, and function +signatures from C files (preferrably header files). +""" + +import sys, re, os + +__all__ = ['winDefs', 'CParser'] + + +def winDefs(verbose=False): + """Convenience function. Returns a parser which loads a selection of windows headers included with + CParser. These definitions can either be accessed directly or included before parsing + another file like this: + windefs = CParser.winDefs() + p = CParser.CParser("headerFile.h", copyFrom=windefs) + Definitions are pulled from a selection of header files included in Visual Studio + (possibly not legal to distribute? Who knows.), some of which have been abridged + because they take so long to parse. + """ + headerFiles = ['WinNt.h', 'WinDef.h', 'WinBase.h', 'BaseTsd.h', 'WTypes.h', 'WinUser.h'] + d = os.path.dirname(__file__) + p = CParser( + [os.path.join(d, 'headers', h) for h in headerFiles], + types={'__int64': ('long long')}, + macros={'_WIN32': '', '_MSC_VER': '800', 'CONST': 'const', 'NO_STRICT': None}, + processAll=False + ) + p.processAll(cache=os.path.join(d, 'headers', 'WinDefs.cache'), noCacheWarning=True, verbose=verbose) + return p + + +class CParser(): + """Class for parsing C code to extract variable, struct, enum, and function declarations as well as preprocessor macros. This is not a complete C parser; instead, it is meant to simplify the process + of extracting definitions from header files in the absence of a complete build system. 
Many files + will require some amount of manual intervention to parse properly (see 'replace' and extra arguments + to __init__) + + Usage: + ## create parser object, load two files + p = CParser(['header1.h', 'header2.h']) + + ## remove comments, preprocess, and search for declarations + p.processAll() + + ## just to see what was successfully parsed from the files + p.printAll() + + ## access parsed declarations + allValues = p.defs['values'] + functionSignatures = p.defs['functions'] + ... + + ## To see what was not successfully parsed: + unp = p.processAll(returnUnparsed=True) + for s in unp: + print s + """ + + cacheVersion = 22 ## increment every time cache structure or parsing changes to invalidate old cache files. + + def __init__(self, files=None, replace=None, copyFrom=None, processAll=True, cache=None, verbose=False, **args): + """Create a C parser object fiven a file or list of files. Files are read to memory and operated + on from there. + 'copyFrom' may be another CParser object from which definitions should be copied. + 'replace' may be specified to perform string replacements before parsing. + format is {'searchStr': 'replaceStr', ...} + Extra parameters may be used to specify the starting state of the parser. For example, + one could provide a set of missing type declarations by + types={'UINT': ('unsigned int'), 'STRING': ('char', 1)} + Similarly, preprocessor macros can be specified: + macros={'WINAPI': ''} + """ + + + self.defs = {} ## holds all definitions + self.fileDefs = {} ## holds definitions grouped by the file they came from + + self.initOpts = args.copy() + self.initOpts['files'] = [] + self.initOpts['replace'] = {} + + self.dataList = ['types', 'variables', 'fnmacros', 'macros', 'structs', 'unions', 'enums', 'functions', 'values'] + + self.verbose = False + + # placeholders for definitions that change during parsing + #if hasPyParsing: + #self.macroExpr = Forward() + #self.fnMacroExpr = Forward() + #self.definedType = Forward() + #self.definedStruct = Forward() + #self.definedEnum = Forward() + + self.fileOrder = [] + self.files = {} + self.packList = {} ## list describing struct packing rules as defined by #pragma pack + if files is not None: + if type(files) is str: + files = [files] + for f in files: + self.loadFile(f, replace) + + ## initialize empty definition lists + for k in self.dataList: + self.defs[k] = {} + #for f in files: + #self.fileDefs[f][k] = {} + + self.compiledTypes = {} ## holds translations from typedefs/structs/unions to fundamental types + + self.currentFile = None + + # Import extra arguments if specified + for t in args: + for k in args[t].keys(): + self.addDef(t, k, args[t][k]) + + # Import from other CParsers if specified + if copyFrom is not None: + if type(copyFrom) not in [list, tuple]: + copyFrom = [copyFrom] + for p in copyFrom: + self.importDict(p.fileDefs) + + if processAll: + self.processAll(cache=cache, verbose=verbose) + + def processAll(self, cache=None, returnUnparsed=False, printAfterPreprocess=False, noCacheWarning=True, verbose=False): + """Remove comments, preprocess, and parse declarations from all files. (operates in memory; does not alter the original files) + Returns a list of the results from parseDefs. + 'cache' may specify a file where cached results are be stored or retrieved. The cache + is automatically invalidated if any of the arguments to __init__ are changed, or if the + C files are newer than the cache. + 'returnUnparsed' is passed directly to parseDefs. 
+ 'printAfterPreprocess' is for debugging; prints the result of preprocessing each file.""" + self.verbose = verbose + if cache is not None and self.loadCache(cache, checkValidity=True): + if verbose: + print "Loaded cached definitions; will skip parsing." + return ## cached values loaded successfully, nothing left to do here + #else: + #print "No cache.", cache + + + results = [] + if noCacheWarning or verbose: + print "Parsing C header files (no valid cache found). This could take several minutes..." + for f in self.fileOrder: + #fn = os.path.basename(f) + if self.files[f] is None: + ## This means the file could not be loaded and there was no cache. + raise Exception('Could not find header file "%s" or a suitable cache file.' % f) + if verbose: + print "Removing comments from file '%s'..." % f + self.removeComments(f) + if verbose: + print "Preprocessing file '%s'..." % f + self.preprocess(f) + if printAfterPreprocess: + print "===== PREPROCSSED %s =======" % f + print self.files[f] + if verbose: + print "Parsing definitions in file '%s'..." % f + results.append(self.parseDefs(f, returnUnparsed)) + + if cache is not None: + if verbose: + print "Writing cache file '%s'" % cache + self.writeCache(cache) + + return results + + + def loadCache(self, cacheFile, checkValidity=False): + """Load a cache file. Used internally if cache is specified in processAll(). + if checkValidity=True, then run several checks before loading the cache: + - cache file must not be older than any source files + - cache file must not be older than this library file + - options recorded in cache must match options used to initialize CParser""" + + ## make sure cache file exists + if type(cacheFile) is not str: + raise Exception("cache file option must be a string.") + if not os.path.isfile(cacheFile): + d = os.path.dirname(__file__) ## If file doesn't exist, search for it in this module's path + cacheFile = os.path.join(d, "headers", cacheFile) + if not os.path.isfile(cacheFile): + if self.verbose: + print "Can't find requested cache file." + return False + + ## make sure cache is newer than all input files + if checkValidity: + mtime = os.stat(cacheFile).st_mtime + for f in self.fileOrder: + ## if file does not exist, then it does not count against the validity of the cache. + if os.path.isfile(f) and os.stat(f).st_mtime > mtime: + if self.verbose: + print "Cache file is out of date." + return False + + try: + ## read cache file + import pickle + cache = pickle.load(open(cacheFile, 'rb')) + + ## make sure __init__ options match + if checkValidity: + if cache['opts'] != self.initOpts: + if self.verbose: + print "Cache file is not valid--created using different initialization options." + print cache['opts'] + print self.initOpts + return False + elif self.verbose: + print "Cache init opts are OK:" + print cache['opts'] + if cache['version'] < self.cacheVersion: + if self.verbose: + print "Cache file is not valid--cache format has changed." + return False + + ## import all parse results + self.importDict(cache['fileDefs']) + return True + except: + print "Warning--cache read failed:" + sys.excepthook(*sys.exc_info()) + return False + + def importDict(self, data): + """Import definitions from a dictionary. The dict format should be the + same as CParser.fileDefs. 
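+        (Roughly {'someFile.h': {'types': {...}, 'functions': {...}, ...}, ...} with
+        one sub-dictionary per category in self.dataList -- illustrative sketch.)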
Used internally; does not need to be called + manually.""" + for f in data.keys(): + self.currentFile = f + for k in self.dataList: + for n in data[f][k]: + self.addDef(k, n, data[f][k][n]) + + def writeCache(self, cacheFile): + """Store all parsed declarations to cache. Used internally.""" + cache = {} + cache['opts'] = self.initOpts + cache['fileDefs'] = self.fileDefs + cache['version'] = self.cacheVersion + #for k in self.dataList: + #cache[k] = getattr(self, k) + import pickle + pickle.dump(cache, open(cacheFile, 'wb')) + + def loadFile(self, file, replace=None): + """Read a file, make replacements if requested. Called by __init__, should + not be called manually.""" + if not os.path.isfile(file): + ## Not a fatal error since we might be able to function properly if there is a cache file.. + #raise Exception("File %s not found" % file) + print "Warning: C header '%s' is missing; this may cause trouble." % file + self.files[file] = None + return False + + fd = open(file, 'rU') ## U causes all newline types to be converted to \n + self.files[file] = fd.read() + fd.close() + + if replace is not None: + for s in replace: + self.files[file] = re.sub(s, replace[s], self.files[file]) + self.fileOrder.append(file) + bn = os.path.basename(file) + self.initOpts['replace'][bn] = replace + self.initOpts['files'].append(bn) # only interested in the file names; the directory may change between systems. + return True + + + + + + #### Beginning of processing functions + + def assertPyparsing(self): + """Make sure pyparsing module is available.""" + global hasPyParsing + if not hasPyParsing: + raise Exception("CParser class requires 'pyparsing' library for actual parsing work. Without this library, CParser can only be used with previously cached parse results.") + + + def removeComments(self, file): + """Remove all comments from file. (operates in memory; does not alter the original files)""" + self.assertPyparsing() + text = self.files[file] + cplusplusLineComment = Literal("//") + restOfLine + # match quoted strings first to prevent matching comments inside quotes + self.files[file] = (quotedString | cStyleComment.suppress() | cplusplusLineComment.suppress()).transformString(text) + + + def preprocess(self, file): + """Scan named file for preprocessor directives, removing them while expanding macros. 
(operates in memory; does not alter the original files)""" + self.assertPyparsing() + self.buildParser() ## we need this so that evalExpr works properly + self.currentFile = file + packStack = [(None,None)] ## stack for #pragma pack push/pop + self.packList[file] = [(0,None)] + packing = None ## current packing value + + text = self.files[file] + + ## First join together lines split by \\n + text = Literal('\\\n').suppress().transformString(text) + + #self.ppDirective = Combine("#" + Word(alphas).leaveWhitespace()) + restOfLine + + # define the structure of a macro definition + name = Word(alphas+'_', alphanums+'_')('name') + self.ppDefine = name.setWhitespaceChars(' \t')("macro") + Optional(lparen + delimitedList(name) + rparen).setWhitespaceChars(' \t')('args') + SkipTo(LineEnd())('value') + self.ppDefine.setParseAction(self.processMacroDefn) + + #self.updateMacroDefns() + #self.updateFnMacroDefns() + + # define pattern for scanning through the input string + #self.macroExpander = (self.macroExpr | self.fnMacroExpr) + + ## Comb through lines, process all directives + lines = text.split('\n') + + result = [] + #macroExpander = (quotedString | self.macroExpander) + directive = re.compile(r'\s*#([a-zA-Z]+)(.*)$') + ifTrue = [True] + ifHit = [] + for i in range(len(lines)): + line = lines[i] + newLine = '' + m = directive.match(line) + if m is None: # regular code line + if ifTrue[-1]: # only include if we are inside the correct section of an IF block + #line = macroExpander.transformString(line) # expand all known macros + newLine = self.expandMacros(line) + else: # macro line + d = m.groups()[0] + rest = m.groups()[1] + + #print "PREPROCESS:", d, rest + if d == 'ifdef': + d = 'if' + rest = 'defined '+rest + elif d == 'ifndef': + d = 'if' + rest = '!defined '+rest + + ## Evaluate 'defined' operator before expanding macros + if d in ['if', 'elif']: + def pa(t): + return ['0', '1'][t['name'] in self.defs['macros'] or t['name'] in self.defs['fnmacros']] + rest = ( + Keyword('defined') + + (name | lparen + name + rparen) + ).setParseAction(pa).transformString(rest) + elif d in ['define', 'undef']: + macroName, rest = re.match(r'\s*([a-zA-Z_][a-zA-Z0-9_]*)(.*)$', rest).groups() + + ## Expand macros if needed + if rest is not None and (all(ifTrue) or d in ['if', 'elif']): + rest = self.expandMacros(rest) + + if d == 'elif': + if ifHit[-1] or not all(ifTrue[:-1]): + ev = False + else: + ev = self.evalPreprocessorExpr(rest) + if self.verbose: + print " "*(len(ifTrue)-2) + line, rest, ev + ifTrue[-1] = ev + ifHit[-1] = ifHit[-1] or ev + elif d == 'else': + if self.verbose: + print " "*(len(ifTrue)-2) + line, not ifHit[-1] + ifTrue[-1] = (not ifHit[-1]) and all(ifTrue[:-1]) + ifHit[-1] = True + elif d == 'endif': + ifTrue.pop() + ifHit.pop() + if self.verbose: + print " "*(len(ifTrue)-1) + line + elif d == 'if': + if all(ifTrue): + ev = self.evalPreprocessorExpr(rest) + else: + ev = False + if self.verbose: + print " "*(len(ifTrue)-1) + line, rest, ev + ifTrue.append(ev) + ifHit.append(ev) + elif d == 'define': + if not ifTrue[-1]: + continue + if self.verbose: + print " "*(len(ifTrue)) + "define:", macroName, rest + try: + self.ppDefine.parseString(macroName+ ' ' + rest) ## macro is registered here + except: + print "Error processing macro definition:", macroName, rest + print " ", sys.exc_info()[1] + elif d == 'undef': + if not ifTrue[-1]: + continue + try: + self.remDef('macros', macroName.strip()) + #self.macroListString = '|'.join(self.defs['macros'].keys() + self.defs['fnmacros'].keys()) + 
#self.updateMacroDefns() + except: + if sys.exc_info()[0] is not KeyError: + sys.excepthook(*sys.exc_info()) + print "Error removing macro definition '%s'" % macroName.strip() + elif d == 'pragma': ## Check for changes in structure packing + if not ifTrue[-1]: + continue + m = re.match(r'\s+pack\s*\(([^\)]+)\)', rest) + if m is None: + continue + opts = [s.strip() for s in m.groups()[0].split(',')] + + pushpop = id = val = None + for o in opts: + if o in ['push', 'pop']: + pushpop = o + elif o.isdigit(): + val = int(o) + else: + id = o + + if val is not None: + packing = val + + if pushpop == 'push': + packStack.append((packing, id)) + elif opts[0] == 'pop': + if id is None: + packStack.pop() + else: + ind = None + for i in range(len(packStack)): + if packStack[i][1] == id: + ind = i + break + if ind is not None: + packStack = packStack[:ind] + if val is None: + packing = packStack[-1][0] + else: + packing = int(opts[0]) + + if self.verbose: + print ">> Packing changed to %s at line %d" % (str(packing), i) + self.packList[file].append((i, packing)) + else: + pass ## Ignore any other directives + + result.append(newLine) + self.files[file] = '\n'.join(result) + + def evalPreprocessorExpr(self, expr): + ## make a few alterations so the expression can be eval'd + macroDiffs = ( + Literal('!').setParseAction(lambda: ' not ') | + Literal('&&').setParseAction(lambda: ' and ') | + Literal('||').setParseAction(lambda: ' or ') | + Word(alphas+'_',alphanums+'_').setParseAction(lambda: '0')) + expr2 = macroDiffs.transformString(expr) + + try: + ev = bool(eval(expr2)) + except: + if self.verbose: + print "Error evaluating preprocessor expression: %s [%s]" % (expr, expr2) + print " ", sys.exc_info()[1] + ev = False + return ev + + + + #def updateMacroDefns(self): + ##self.macroExpr << MatchFirst( [Keyword(m)('macro') for m in self.defs['macros']] ) + ##self.macroExpr.setParseAction(self.processMacroRef) + + ## regex is faster than pyparsing. 
+ ## Matches quoted strings and macros + + ##names = self.defs['macros'].keys() + self.defs['fnmacros'].keys() + #if len(self.macroListString) == 0: + #self.macroRegex = None + #else: + #self.macroRegex = re.compile( + #r'("(\\"|[^"])*")|(\b(%s)\b)' % self.macroListString + #) + + #def updateFnMacroDefns(self): + #self.fnMacroExpr << MatchFirst( [(Keyword(m)('macro') + lparen + Group(delimitedList(expression))('args') + rparen) for m in self.defs['fnmacros']] ) + #self.fnMacroExpr.setParseAction(self.processFnMacroRef) + + + def processMacroDefn(self, t): + """Parse a #define macro and register the definition""" + if self.verbose: + print "MACRO:", t + #macroVal = self.macroExpander.transformString(t.value).strip() + #macroVal = Literal('\\\n').suppress().transformString(macroVal) ## remove escaped newlines + macroVal = t.value.strip() + if macroVal in self.defs['fnmacros']: + self.addDef('fnmacros', t.macro, self.defs['fnmacros'][macroVal]) + if self.verbose: + print " Copy fn macro %s => %s" % (macroVal, t.macro) + else: + if t.args == '': + val = self.evalExpr(macroVal) + self.addDef('macros', t.macro, macroVal) + self.addDef('values', t.macro, val) + if self.verbose: + print " Add macro:", t.macro, "("+str(val)+")", self.defs['macros'][t.macro] + else: + self.addDef('fnmacros', t.macro, self.compileFnMacro(macroVal, [x for x in t.args])) + if self.verbose: + print " Add fn macro:", t.macro, t.args, self.defs['fnmacros'][t.macro] + + #if self.macroListString == '': + #self.macroListString = t.macro + #else: + #self.macroListString += '|' + t.macro + #self.updateMacroDefns() + #self.macroExpr << MatchFirst( map(Keyword,self.defs['macros'].keys()) ) + return "#define " + t.macro + " " + macroVal + + + def compileFnMacro(self, text, args): + """Turn a function macro spec into a compiled description""" + ## find all instances of each arg in text + argRegex = re.compile(r'("(\\"|[^"])*")|(\b(%s)\b)' % ('|'.join(args))) + start = 0 + parts = [] + argOrder = [] + N = 3 + for m in argRegex.finditer(text): + arg = m.groups()[N] + #print m, arg + if arg is not None: + parts.append(text[start:m.start(N)] + '%s') + start = m.end(N) + argOrder.append(args.index(arg)) + parts.append(text[start:]) + return (''.join(parts), argOrder) + + + def expandMacros(self, line): + reg = re.compile(r'("(\\"|[^"])*")|(\b(\w+)\b)') + parts = [] + start = 0 + N = 3 ## the group number to check for macro names + macros = self.defs['macros'] + fnmacros = self.defs['fnmacros'] + for m in reg.finditer(line): + name = m.groups()[N] + if name in macros: + parts.append(line[start:m.start(N)]) + start = m.end(N) + parts.append(macros[name]) + elif name in fnmacros: + try: ## If function macro expansion fails, just ignore it. + exp, end = self.expandFnMacro(name, line[m.end(N):]) + parts.append(line[start:m.start(N)]) + start = end + m.end(N) + parts.append(exp) + except: + if sys.exc_info()[1][0] != 0: + print "Function macro expansion failed:", name, line[m.end(N):] + raise + parts.append(line[start:]) + return ''.join(parts) + + + + #def expandMacros(self, line): + #if self.macroRegex is None: + #return line + #parts = [] + #start = 0 + #N = 3 ## the group number to check for macro names + #for m in self.macroRegex.finditer(line): + #name = m.groups()[N] + #if name is not None: + #if name in self.defs['macros']: + #parts.append(line[start:m.start(N)]) + #start = m.end(N) + #parts.append(self.defs['macros'][name]) + #elif name in self.defs['fnmacros']: + #try: ## If function macro expansion fails, just ignore it. 
+ #exp, end = self.expandFnMacro(name, line[m.end(N):]) + #parts.append(line[start:m.start(N)]) + #start = end + m.end(N) + #parts.append(exp) + #except: + #if sys.exc_info()[1][0] != 0: + #print "Function macro expansion failed:", name, line[m.end(N):] + #raise + + #else: + #raise Exception("Macro '%s' not found (internal error)" % name) + #parts.append(line[start:]) + #return ''.join(parts) + + def expandFnMacro(self, name, text): + #print "expandMacro:", name, text + defn = self.defs['fnmacros'][name] + ## defn looks like ('%s + %s / %s', (0, 0, 1)) + + argList = stringStart + lparen + Group(delimitedList(expression))('args') + rparen + res = [x for x in argList.scanString(text, 1)] + if len(res) == 0: + raise Exception(0, "Function macro '%s' not followed by (...)" % name) + args, start, end = res[0] + #print " ", res + #print " ", args + #print " ", defn + newStr = defn[0] % tuple([args[0][i] for i in defn[1]]) + #print " ", newStr + return (newStr, end) + + + # parse action to replace macro references with their respective definition + #def processMacroRef(self, t): + #return self.defs['macros'][t.macro] + + #def processFnMacroRef(self, t): + #m = self.defs['fnmacros'][t.macro] + ##print "=====>>" + ##print "Process FN MACRO:", t + ##print " macro defn:", t.macro, m + ##print " macro call:", t.args + ### m looks like ('a + b', ('a', 'b')) + #newStr = m[0][:] + ##print " starting str:", newStr + #try: + #for i in range(len(m[1])): + ##print " step", i + #arg = m[1][i] + ##print " arg:", arg, '=>', t.args[i] + + #newStr = Keyword(arg).copy().setParseAction(lambda: t.args[i]).transformString(newStr) + ##print " new str:", newStr + #except: + ##sys.excepthook(*sys.exc_info()) + #raise + ##print "<<=====" + #return newStr + + + + + + + + + def parseDefs(self, file, returnUnparsed=False): + """Scan through the named file for variable, struct, enum, and function declarations. + Returns the entire tree of successfully parsed tokens. + If returnUnparsed is True, return a string of all lines that failed to match (for debugging).""" + self.assertPyparsing() + self.currentFile = file + #self.definedType << kwl(self.defs['types'].keys()) + + parser = self.buildParser() + if returnUnparsed: + text = parser.suppress().transformString(self.files[file]) + return re.sub(r'\n\s*\n', '\n', text) + else: + return [x[0] for x in parser.scanString(self.files[file])] + + def buildParser(self): + """Builds the entire tree of parser elements for the C language (the bits we support, anyway). + """ + + if hasattr(self, 'parser'): + return self.parser + + + self.assertPyparsing() + + + self.structType = Forward() + self.enumType = Forward() + self.typeSpec = (typeQualifier + ( + fundType | + Optional(kwl(sizeModifiers + signModifiers)) + ident | + self.structType | + self.enumType + ) + typeQualifier + msModifier).setParseAction(recombine) + #self.argList = Forward() + + ### Abstract declarators for use in function pointer arguments + # Thus begins the extremely hairy business of parsing C declarators. + # Whomever decided this was a reasonable syntax should probably never breed. + # The following parsers combined with the processDeclarator function + # allow us to turn a nest of type modifiers into a correctly + # ordered list of modifiers. + + self.declarator = Forward() + self.abstractDeclarator = Forward() + + ## abstract declarators look like: + # + # * + # **[num] + # (*)(int, int) + # *( )(int, int)[10] + # ...etc... 
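+        ## (The named results captured below -- 'ptrs', 'ref', 'center', 'args' and
+        ##  'arrays' -- are what processDeclarator() walks to rebuild an ordered
+        ##  modifier list for each declaration; see processType() below for
+        ##  examples of the serialized form.)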
+ self.abstractDeclarator << Group( + typeQualifier + Group(ZeroOrMore('*'))('ptrs') + typeQualifier + + ((Optional('&')('ref')) | (lparen + self.abstractDeclarator + rparen)('center')) + + Optional(lparen + Optional(delimitedList(Group( + self.typeSpec('type') + + self.abstractDeclarator('decl') + + Optional(Literal('=').suppress() + expression, default=None)('val') + )), default=None) + rparen)('args') + + Group(ZeroOrMore(lbrack + Optional(expression, default='-1') + rbrack))('arrays') + ) + + ## Argument list may consist of declarators or abstract declarators + #self.argList << delimitedList(Group( + #self.typeSpec('type') + + #(self.declarator('decl') | self.abstractDeclarator('decl')) + + #Optional(Keyword('=')) + expression + #)) + + ## declarators look like: + # varName + # *varName + # **varName[num] + # (*fnName)(int, int) + # * fnName(int arg1=0)[10] + # ...etc... + self.declarator << Group( + typeQualifier + callConv + Group(ZeroOrMore('*'))('ptrs') + typeQualifier + + ((Optional('&')('ref') + ident('name')) | (lparen + self.declarator + rparen)('center')) + + Optional(lparen + Optional(delimitedList(Group( + self.typeSpec('type') + + (self.declarator | self.abstractDeclarator)('decl') + + Optional(Literal('=').suppress() + expression, default=None)('val') + )), default=None) + rparen)('args') + + Group(ZeroOrMore(lbrack + Optional(expression, default='-1') + rbrack))('arrays') + ) + self.declaratorList = Group(delimitedList(self.declarator)) + + ## typedef + self.typeDecl = Keyword('typedef') + self.typeSpec('type') + self.declaratorList('declList') + semi + self.typeDecl.setParseAction(self.processTypedef) + + ## variable declaration + self.variableDecl = Group(self.typeSpec('type') + Optional(self.declaratorList('declList')) + Optional(Literal('=').suppress() + (expression('value') | (lbrace + Group(delimitedList(expression))('arrayValues') + rbrace)))) + semi + + self.variableDecl.setParseAction(self.processVariable) + + ## function definition + #self.paramDecl = Group(self.typeSpec + (self.declarator | self.abstractDeclarator)) + Optional(Literal('=').suppress() + expression('value')) + self.typelessFunctionDecl = self.declarator('decl') + nestedExpr('{', '}').suppress() + self.functionDecl = self.typeSpec('type') + self.declarator('decl') + nestedExpr('{', '}').suppress() + self.functionDecl.setParseAction(self.processFunction) + + + ## Struct definition + self.structDecl = Forward() + structKW = (Keyword('struct') | Keyword('union')) + #self.structType << structKW('structType') + ((Optional(ident)('name') + lbrace + Group(ZeroOrMore( Group(self.structDecl | self.variableDecl.copy().setParseAction(lambda: None)) ))('members') + rbrace) | ident('name')) + self.structMember = ( + Group(self.variableDecl.copy().setParseAction(lambda: None)) | + (self.typeSpec + self.declarator + nestedExpr('{', '}')).suppress() | + (self.declarator + nestedExpr('{', '}')).suppress() + ) + self.declList = lbrace + Group(OneOrMore(self.structMember))('members') + rbrace + self.structType << (Keyword('struct') | Keyword('union'))('structType') + ((Optional(ident)('name') + self.declList) | ident('name')) + + self.structType.setParseAction(self.processStruct) + #self.updateStructDefn() + + self.structDecl = self.structType + semi + + ## enum definition + enumVarDecl = Group(ident('name') + Optional(Literal('=').suppress() + (integer('value') | ident('valueName')))) + + self.enumType << Keyword('enum') + (Optional(ident)('name') + lbrace + Group(delimitedList(enumVarDecl))('members') + rbrace | 
ident('name')) + self.enumType.setParseAction(self.processEnum) + + self.enumDecl = self.enumType + semi + + + #self.parser = (self.typeDecl | self.variableDecl | self.structDecl | self.enumDecl | self.functionDecl) + self.parser = (self.typeDecl | self.variableDecl | self.functionDecl) + return self.parser + + def processDeclarator(self, decl): + """Process a declarator (without base type) and return a tuple (name, [modifiers]) + See processType(...) for more information.""" + toks = [] + name = None + #print "DECL:", decl + if 'callConv' in decl and len(decl['callConv']) > 0: + toks.append(decl['callConv']) + if 'ptrs' in decl and len(decl['ptrs']) > 0: + toks.append('*' * len(decl['ptrs'])) + if 'arrays' in decl and len(decl['arrays']) > 0: + #arrays = [] + #for x in decl['arrays']: + #n = self.evalExpr(x) + #if n == -1: ## If an array was given as '[]', interpret it as '*' instead. + #toks.append('*') + #else: + #arrays.append(n) + #if len(arrays) > 0: + #toks.append(arrays) + toks.append([self.evalExpr(x) for x in decl['arrays']]) + if 'args' in decl and len(decl['args']) > 0: + #print " process args" + if decl['args'][0] is None: + toks.append(()) + else: + toks.append(tuple([self.processType(a['type'], a['decl']) + (a['val'][0],) for a in decl['args']])) + if 'ref' in decl: + toks.append('&') + if 'center' in decl: + (n, t) = self.processDeclarator(decl['center'][0]) + if n is not None: + name = n + toks.extend(t) + if 'name' in decl: + name = decl['name'] + return (name, toks) + + def processType(self, typ, decl): + """Take a declarator + base type and return a serialized name/type description. + The description will be a list of elements (name, [basetype, modifier, modifier, ...]) + - name is the string name of the declarator or None for an abstract declarator + - basetype is the string representing the base type + - modifiers can be: + '*' - pointer (multiple pointers "***" allowed) + '&' - reference + '__X' - calling convention (windows only). X can be 'cdecl' or 'stdcall' + list - array. Value(s) indicate the length of each array, -1 for incomplete type. + tuple - function, items are the output of processType for each function argument. 
+ + Examples: + int *x[10] => ('x', ['int', [10], '*']) + char fn(int x) => ('fn', ['char', [('x', ['int'])]]) + struct s (*)(int, int*) => (None, ["struct s", ((None, ['int']), (None, ['int', '*'])), '*']) + """ + #print "PROCESS TYPE/DECL:", typ, decl + (name, decl) = self.processDeclarator(decl) + return (name, [typ] + decl) + + + + def processEnum(self, s, l, t): + try: + if self.verbose: + print "ENUM:", t + if t.name == '': + n = 0 + while True: + name = 'anonEnum%d' % n + if name not in self.defs['enums']: + break + n += 1 + else: + name = t.name[0] + + if self.verbose: + print " name:", name + + if name not in self.defs['enums']: + i = 0 + enum = {} + for v in t.members: + if v.value != '': + i = eval(v.value) + if v.valueName != '': + i = enum[v.valueName] + enum[v.name] = i + self.addDef('values', v.name, i) + i += 1 + if self.verbose: + print " members:", enum + self.addDef('enums', name, enum) + self.addDef('types', 'enum '+name, ('enum', name)) + return ('enum ' + name) + except: + if self.verbose: + print "Error processing enum:", t + sys.excepthook(*sys.exc_info()) + + + def processFunction(self, s, l, t): + if self.verbose: + print "FUNCTION", t, t.keys() + + try: + (name, decl) = self.processType(t.type, t.decl[0]) + if len(decl) == 0 or type(decl[-1]) != tuple: + print t + raise Exception("Incorrect declarator type for function definition.") + if self.verbose: + print " name:", name + print " sig:", decl + self.addDef('functions', name, (decl[:-1], decl[-1])) + + except: + if self.verbose: + print "Error processing function:", t + sys.excepthook(*sys.exc_info()) + + + def packingAt(self, line): + """Return the structure packing value at the given line number""" + packing = None + for p in self.packList[self.currentFile]: + if p[0] <= line: + packing = p[1] + else: + break + return packing + + def processStruct(self, s, l, t): + try: + strTyp = t.structType # struct or union + + ## check for extra packing rules + packing = self.packingAt(lineno(l, s)) + + if self.verbose: + print strTyp.upper(), t.name, t + if t.name == '': + n = 0 + while True: + sname = 'anon_%s%d' % (strTyp, n) + if sname not in self.defs[strTyp+'s']: + break + n += 1 + else: + if type(t.name) is str: + sname = t.name + else: + sname = t.name[0] + if self.verbose: + print " NAME:", sname + if len(t.members) > 0 or sname not in self.defs[strTyp+'s'] or self.defs[strTyp+'s'][sname] == {}: + if self.verbose: + print " NEW " + strTyp.upper() + struct = [] + for m in t.members: + typ = m[0].type + val = self.evalExpr(m) + if self.verbose: + print " member:", m, m[0].keys(), m[0].declList + if len(m[0].declList) == 0: ## anonymous member + struct.append((None, [typ], None)) + for d in m[0].declList: + (name, decl) = self.processType(typ, d) + struct.append((name, decl, val)) + if self.verbose: + print " ", name, decl, val + self.addDef(strTyp+'s', sname, {'pack': packing, 'members': struct}) + self.addDef('types', strTyp+' '+sname, (strTyp, sname)) + #self.updateStructDefn() + return strTyp+' '+sname + except: + #print t + sys.excepthook(*sys.exc_info()) + + def processVariable(self, s, l, t): + if self.verbose: + print "VARIABLE:", t + try: + val = self.evalExpr(t[0]) + for d in t[0].declList: + (name, typ) = self.processType(t[0].type, d) + if type(typ[-1]) is tuple: ## this is a function prototype + if self.verbose: + print " Add function prototype:", name, typ, val + self.addDef('functions', name, (typ[:-1], typ[-1])) + else: + if self.verbose: + print " Add variable:", name, typ, val + 
self.addDef('variables', name, (val, typ)) + self.addDef('values', name, val) + except: + #print t, t[0].name, t.value + sys.excepthook(*sys.exc_info()) + + def processTypedef(self, s, l, t): + if self.verbose: + print "TYPE:", t + typ = t.type + #print t, t.type + for d in t.declList: + (name, decl) = self.processType(typ, d) + if self.verbose: + print " ", name, decl + self.addDef('types', name, decl) + #self.definedType << MatchFirst( map(Keyword,self.defs['types'].keys()) ) + + def evalExpr(self, toks): + ## Evaluates expressions. Currently only works for expressions that also + ## happen to be valid python expressions. + ## This function does not currently include previous variable + ## declarations, but that should not be too difficult to implement.. + #print "Eval:", toks + try: + if isinstance(toks, basestring): + #print " as string" + val = self.eval(toks, None, self.defs['values']) + elif toks.arrayValues != '': + #print " as list:", toks.arrayValues + val = [self.eval(x, None, self.defs['values']) for x in toks.arrayValues] + elif toks.value != '': + #print " as value" + val = self.eval(toks.value, None, self.defs['values']) + else: + #print " as None" + val = None + return val + except: + if self.verbose: + print " failed eval:", toks + print " ", sys.exc_info()[1] + return None + + def eval(self, expr, *args): + """Just eval with a little extra robustness.""" + expr = expr.strip() + cast = (lparen + self.typeSpec + self.abstractDeclarator + rparen).suppress() + expr = (quotedString | number | cast).transformString(expr) + if expr == '': + return None + return eval(expr, *args) + + def printAll(self, file=None): + """Print everything parsed from files. Useful for debugging.""" + from pprint import pprint + for k in self.dataList: + print "============== %s ==================" % k + if file is None: + pprint(self.defs[k]) + else: + pprint(self.fileDefs[file][k]) + + def addDef(self, typ, name, val): + """Add a definition of a specific type to both the definition set for the current file and the global definition set.""" + self.defs[typ][name] = val + if self.currentFile is None: + baseName = None + else: + baseName = os.path.basename(self.currentFile) + if baseName not in self.fileDefs: + self.fileDefs[baseName] = {} + for k in self.dataList: + self.fileDefs[baseName][k] = {} + self.fileDefs[baseName][typ][name] = val + + def remDef(self, typ, name): + if self.currentFile is None: + baseName = None + else: + baseName = os.path.basename(self.currentFile) + del self.defs[typ][name] + del self.fileDefs[baseName][typ][name] + + + def isFundType(self, typ): + """Return True if this type is a fundamental C type, struct, or union""" + if typ[0][:7] == 'struct ' or typ[0][:6] == 'union ' or typ[0][:5] == 'enum ': + return True + + names = baseTypes + sizeModifiers + signModifiers + for w in typ[0].split(): + if w not in names: + return False + return True + + def evalType(self, typ): + """evaluate a named type into its fundamental type""" + used = [] + while True: + if self.isFundType(typ): + ## remove 'signed' before returning evaluated type + typ[0] = re.sub(r'\bsigned\b', '', typ[0]).strip() + + + return typ + parent = typ[0] + if parent in used: + raise Exception('Recursive loop while evaluating types. 
(typedefs are %s)' % (' -> '.join(used+[parent]))) + used.append(parent) + if not parent in self.defs['types']: + raise Exception('Unknown type "%s" (typedefs are %s)' % (parent, ' -> '.join(used))) + pt = self.defs['types'][parent] + typ = pt + typ[1:] + + def find(self, name): + """Search all definitions for the given name""" + res = [] + for f in self.fileDefs: + fd = self.fileDefs[f] + for t in fd: + typ = fd[t] + for k in typ: + if isinstance(name, basestring): + if k == name: + res.append((f, t)) + else: + if re.match(name, k): + res.append((f, t, k)) + return res + + + + def findText(self, text): + """Search all file strings for text, return matching lines.""" + res = [] + for f in self.files: + l = self.files[f].split('\n') + for i in range(len(l)): + if text in l[i]: + res.append((f, i, l[i])) + return res + + +hasPyParsing = False +try: + from pyparsing import * + ParserElement.enablePackrat() + hasPyParsing = True +except: + pass ## no need to do anything yet as we might not be using any parsing functions.. + + +## Define some common language elements if pyparsing is available. +if hasPyParsing: + ## Some basic definitions + expression = Forward() + pexpr = '(' + expression + ')' + numTypes = ['int', 'float', 'double', '__int64'] + baseTypes = ['char', 'bool', 'void'] + numTypes + sizeModifiers = ['short', 'long'] + signModifiers = ['signed', 'unsigned'] + qualifiers = ['const', 'static', 'volatile', 'inline', 'restrict', 'near', 'far'] + msModifiers = ['__based', '__declspec', '__fastcall', '__restrict', '__sptr', '__uptr', '__w64', '__unaligned', '__nullterminated'] + keywords = ['struct', 'enum', 'union', '__stdcall', '__cdecl'] + qualifiers + baseTypes + sizeModifiers + signModifiers + + def kwl(strs): + """Generate a match-first list of keywords given a list of strings.""" + #return MatchFirst(map(Keyword,strs)) + return Regex(r'\b(%s)\b' % '|'.join(strs)) + + keyword = kwl(keywords) + wordchars = alphanums+'_$' + ident = (WordStart(wordchars) + ~keyword + Word(alphas+"_",alphanums+"_$") + WordEnd(wordchars)).setParseAction(lambda t: t[0]) + #integer = Combine(Optional("-") + (Word( nums ) | Combine("0x" + Word(hexnums)))) + semi = Literal(";").ignore(quotedString).suppress() + lbrace = Literal("{").ignore(quotedString).suppress() + rbrace = Literal("}").ignore(quotedString).suppress() + lbrack = Literal("[").ignore(quotedString).suppress() + rbrack = Literal("]").ignore(quotedString).suppress() + lparen = Literal("(").ignore(quotedString).suppress() + rparen = Literal(")").ignore(quotedString).suppress() + hexint = Regex('-?0x[%s]+[UL]*'%hexnums).setParseAction(lambda t: t[0].rstrip('UL')) + decint = Regex(r'-?\d+[UL]*').setParseAction(lambda t: t[0].rstrip('UL')) + integer = (hexint | decint) + floating = Regex(r'-?((\d+(\.\d*)?)|(\.\d+))([eE]-?\d+)?') + number = (hexint | floating | decint) + bitfieldspec = ":" + integer + biOperator = oneOf("+ - / * | & || && ! ~ ^ % == != > < >= <= -> . :: << >> = ? :") + uniRightOperator = oneOf("++ --") + uniLeftOperator = oneOf("++ -- - + * sizeof new") + name = (WordStart(wordchars) + Word(alphas+"_",alphanums+"_$") + WordEnd(wordchars)) + #number = Word(hexnums + ".-+xUL").setParseAction(lambda t: t[0].rstrip('UL')) + #stars = Optional(Word('*&'), default='')('ptrs') ## may need to separate & from * later? + callConv = Optional(Keyword('__cdecl')|Keyword('__stdcall'))('callConv') + + ## Removes '__name' from all type specs.. may cause trouble. 
+ underscore2Ident = (WordStart(wordchars) + ~keyword + '__' + Word(alphanums,alphanums+"_$") + WordEnd(wordchars)).setParseAction(lambda t: t[0]) + typeQualifier = ZeroOrMore((underscore2Ident + Optional(nestedExpr())) | kwl(qualifiers)).suppress() + + msModifier = ZeroOrMore(kwl(msModifiers) + Optional(nestedExpr())).suppress() + pointerOperator = ( + '*' + typeQualifier | + '&' + typeQualifier | + '::' + ident + typeQualifier + ) + + + ## language elements + fundType = OneOrMore(kwl(signModifiers + sizeModifiers + baseTypes)).setParseAction(lambda t: ' '.join(t)) + + + + ## Is there a better way to process expressions with cast operators?? + castAtom = ( + ZeroOrMore(uniLeftOperator) + Optional('('+ident+')').suppress() + + (( + ident + '(' + Optional(delimitedList(expression)) + ')' | + ident + OneOrMore('[' + expression + ']') | + ident | number | quotedString + ) | + ('(' + expression + ')')) + + ZeroOrMore(uniRightOperator) + ) + uncastAtom = ( + ZeroOrMore(uniLeftOperator) + + (( + ident + '(' + Optional(delimitedList(expression)) + ')' | + ident + OneOrMore('[' + expression + ']') | + ident | number | quotedString + ) | + ('(' + expression + ')')) + + ZeroOrMore(uniRightOperator) + ) + atom = castAtom | uncastAtom + + expression << Group( + atom + ZeroOrMore(biOperator + atom) + ) + arrayOp = lbrack + expression + rbrack + + def recombine(tok): + """Flattens a tree of tokens and joins into one big string.""" + return " ".join(flatten(tok.asList())) + expression.setParseAction(recombine) + + def flatten(lst): + res = [] + for i in lst: + if type(i) in [list, tuple]: + res.extend(flatten(i)) + else: + res.append(str(i)) + return res + + def printParseResults(pr, depth=0, name=''): + """For debugging; pretty-prints parse result objects.""" + start = name + " "*(20-len(name)) + ':'+ '..'*depth + if isinstance(pr, ParseResults): + print start + for i in pr: + name = '' + for k in pr.keys(): + if pr[k] is i: + name = k + break + printParseResults(i, depth+1, name) + else: + print start + str(pr) + + + +## Just for fun.. +if __name__ == '__main__': + files = sys.argv[1:] + p = CParser(files) + p.processAll() + p.printAll() + \ No newline at end of file diff --git a/scripts/pyclibrary/README.md b/scripts/pyclibrary/README.md new file mode 100644 index 000000000..f1af9afb7 --- /dev/null +++ b/scripts/pyclibrary/README.md @@ -0,0 +1,8 @@ +pyclibrary +========== + +C parser and ctypes automation for Python. + +Fork of . (`bzr branch lp:pyclibrary pyclibrary-bzr && mkdir pyclibrary && cd pyclibrary && bar fast-export --plain ../pyclibrary-bzr | git fast-import`) + +Pyclibrary includes 1) a pure-python C parser and 2) a ctypes automation library that uses C header file definitions to simplify the use of ctypes. The C parser currently processes all macros, typedefs, structs, unions, enums, function prototypes, and global variable declarations, and can evaluate typedefs down to their fundamental C types + pointers/arrays/function signatures. Pyclibrary can automatically build ctypes structs/unions and perform type conversions when calling functions via cdll/windll. 
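The workflow shown in the `__main__` block of CParser.py above can also be driven from another script. A minimal sketch, assuming it is run from inside `scripts/pyclibrary/` and that a header named `example.h` exists (the header name is a placeholder; `CParser`, `processAll()` and the `defs` dictionaries are the ones defined in CParser.py above):

    from CParser import CParser

    # Parse one or more C headers; processAll() runs the
    # macro/typedef/struct/enum/function extraction pass.
    p = CParser(['example.h'])
    p.processAll()

    # Parsed definitions are collected per kind in p.defs.
    print p.defs['types']       # typedef name -> serialized type description
    print p.defs['functions']   # function name -> (type info, argument tuple)
    print p.defs['structs']     # struct name -> {'pack': ..., 'members': [...]}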
diff --git a/scripts/pyclibrary/__init__.py b/scripts/pyclibrary/__init__.py new file mode 100644 index 000000000..618aaa1a8 --- /dev/null +++ b/scripts/pyclibrary/__init__.py @@ -0,0 +1,3 @@ +# -*- coding: utf-8 -*- +from CParser import * +from CLibrary import * \ No newline at end of file diff --git a/scripts/pyclibrary/license.txt b/scripts/pyclibrary/license.txt new file mode 100644 index 000000000..3d04b87ea --- /dev/null +++ b/scripts/pyclibrary/license.txt @@ -0,0 +1,7 @@ +Copyright (c) 2010 Luke Campagnola ('luke.campagnola@%s.com' % 'gmail') + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. \ No newline at end of file diff --git a/scripts/pyclibrary/pyparsing.py b/scripts/pyclibrary/pyparsing.py new file mode 100644 index 000000000..dec506ed0 --- /dev/null +++ b/scripts/pyclibrary/pyparsing.py @@ -0,0 +1,3754 @@ +# module pyparsing.py +# +# Copyright (c) 2003-2011 Paul T. McGuire +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be +# included in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +# CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# +#from __future__ import generators + +__doc__ = \ +""" +pyparsing module - Classes and methods to define and execute parsing grammars + +The pyparsing module is an alternative approach to creating and executing simple grammars, +vs. the traditional lex/yacc approach, or the use of regular expressions. With pyparsing, you +don't need to learn a new syntax for defining grammars or matching expressions - the parsing module +provides a library of classes that you use to construct the grammar directly in Python. + +Here is a program to parse "Hello, World!" 
(or any greeting of the form C{", !"}):: + + from pyparsing import Word, alphas + + # define grammar of a greeting + greet = Word( alphas ) + "," + Word( alphas ) + "!" + + hello = "Hello, World!" + print hello, "->", greet.parseString( hello ) + +The program outputs the following:: + + Hello, World! -> ['Hello', ',', 'World', '!'] + +The Python representation of the grammar is quite readable, owing to the self-explanatory +class names, and the use of '+', '|' and '^' operators. + +The parsed results returned from C{parseString()} can be accessed as a nested list, a dictionary, or an +object with named attributes. + +The pyparsing module handles some of the problems that are typically vexing when writing text parsers: + - extra or missing whitespace (the above program will also handle "Hello,World!", "Hello , World !", etc.) + - quoted strings + - embedded comments +""" + +__version__ = "1.5.6" +__versionTime__ = "1 May 2011 23:41" +__author__ = "Paul McGuire " + +import string +from weakref import ref as wkref +import copy +import sys +import warnings +import re +import sre_constants +#~ sys.stderr.write( "testing pyparsing module, version %s, %s\n" % (__version__,__versionTime__ ) ) + +__all__ = [ +'And', 'CaselessKeyword', 'CaselessLiteral', 'CharsNotIn', 'Combine', 'Dict', 'Each', 'Empty', +'FollowedBy', 'Forward', 'GoToColumn', 'Group', 'Keyword', 'LineEnd', 'LineStart', 'Literal', +'MatchFirst', 'NoMatch', 'NotAny', 'OneOrMore', 'OnlyOnce', 'Optional', 'Or', +'ParseBaseException', 'ParseElementEnhance', 'ParseException', 'ParseExpression', 'ParseFatalException', +'ParseResults', 'ParseSyntaxException', 'ParserElement', 'QuotedString', 'RecursiveGrammarException', +'Regex', 'SkipTo', 'StringEnd', 'StringStart', 'Suppress', 'Token', 'TokenConverter', 'Upcase', +'White', 'Word', 'WordEnd', 'WordStart', 'ZeroOrMore', +'alphanums', 'alphas', 'alphas8bit', 'anyCloseTag', 'anyOpenTag', 'cStyleComment', 'col', +'commaSeparatedList', 'commonHTMLEntity', 'countedArray', 'cppStyleComment', 'dblQuotedString', +'dblSlashComment', 'delimitedList', 'dictOf', 'downcaseTokens', 'empty', 'getTokensEndLoc', 'hexnums', +'htmlComment', 'javaStyleComment', 'keepOriginalText', 'line', 'lineEnd', 'lineStart', 'lineno', +'makeHTMLTags', 'makeXMLTags', 'matchOnlyAtCol', 'matchPreviousExpr', 'matchPreviousLiteral', +'nestedExpr', 'nullDebugAction', 'nums', 'oneOf', 'opAssoc', 'operatorPrecedence', 'printables', +'punc8bit', 'pythonStyleComment', 'quotedString', 'removeQuotes', 'replaceHTMLEntity', +'replaceWith', 'restOfLine', 'sglQuotedString', 'srange', 'stringEnd', +'stringStart', 'traceParseAction', 'unicodeString', 'upcaseTokens', 'withAttribute', +'indentedBlock', 'originalTextFor', +] + +""" +Detect if we are running version 3.X and make appropriate changes +Robert A. Clark +""" +_PY3K = sys.version_info[0] > 2 +if _PY3K: + _MAX_INT = sys.maxsize + basestring = str + unichr = chr + _ustr = str + alphas = string.ascii_lowercase + string.ascii_uppercase +else: + _MAX_INT = sys.maxint + range = xrange + set = lambda s : dict( [(c,0) for c in s] ) + alphas = string.lowercase + string.uppercase + + def _ustr(obj): + """Drop-in replacement for str(obj) that tries to be Unicode friendly. It first tries + str(obj). If that fails with a UnicodeEncodeError, then it tries unicode(obj). It + then < returns the unicode object | encodes it with the default encoding | ... >. 
+ """ + if isinstance(obj,unicode): + return obj + + try: + # If this works, then _ustr(obj) has the same behaviour as str(obj), so + # it won't break any existing code. + return str(obj) + + except UnicodeEncodeError: + # The Python docs (http://docs.python.org/ref/customization.html#l2h-182) + # state that "The return value must be a string object". However, does a + # unicode object (being a subclass of basestring) count as a "string + # object"? + # If so, then return a unicode object: + return unicode(obj) + # Else encode it... but how? There are many choices... :) + # Replace unprintables with escape codes? + #return unicode(obj).encode(sys.getdefaultencoding(), 'backslashreplace_errors') + # Replace unprintables with question marks? + #return unicode(obj).encode(sys.getdefaultencoding(), 'replace') + # ... + + alphas = string.lowercase + string.uppercase + +# build list of single arg builtins, tolerant of Python version, that can be used as parse actions +singleArgBuiltins = [] +import __builtin__ +for fname in "sum len enumerate sorted reversed list tuple set any all".split(): + try: + singleArgBuiltins.append(getattr(__builtin__,fname)) + except AttributeError: + continue + +def _xml_escape(data): + """Escape &, <, >, ", ', etc. in a string of data.""" + + # ampersand must be replaced first + from_symbols = '&><"\'' + to_symbols = ['&'+s+';' for s in "amp gt lt quot apos".split()] + for from_,to_ in zip(from_symbols, to_symbols): + data = data.replace(from_, to_) + return data + +class _Constants(object): + pass + +nums = string.digits +hexnums = nums + "ABCDEFabcdef" +alphanums = alphas + nums +_bslash = chr(92) +printables = "".join( [ c for c in string.printable if c not in string.whitespace ] ) + +class ParseBaseException(Exception): + """base exception class for all parsing runtime exceptions""" + # Performance tuning: we construct a *lot* of these, so keep this + # constructor as small and fast as possible + def __init__( self, pstr, loc=0, msg=None, elem=None ): + self.loc = loc + if msg is None: + self.msg = pstr + self.pstr = "" + else: + self.msg = msg + self.pstr = pstr + self.parserElement = elem + + def __getattr__( self, aname ): + """supported attributes by name are: + - lineno - returns the line number of the exception text + - col - returns the column number of the exception text + - line - returns the line containing the exception text + """ + if( aname == "lineno" ): + return lineno( self.loc, self.pstr ) + elif( aname in ("col", "column") ): + return col( self.loc, self.pstr ) + elif( aname == "line" ): + return line( self.loc, self.pstr ) + else: + raise AttributeError(aname) + + def __str__( self ): + return "%s (at char %d), (line:%d, col:%d)" % \ + ( self.msg, self.loc, self.lineno, self.column ) + def __repr__( self ): + return _ustr(self) + def markInputline( self, markerString = ">!<" ): + """Extracts the exception line from the input string, and marks + the location of the exception with a special symbol. 
+ """ + line_str = self.line + line_column = self.column - 1 + if markerString: + line_str = "".join( [line_str[:line_column], + markerString, line_str[line_column:]]) + return line_str.strip() + def __dir__(self): + return "loc msg pstr parserElement lineno col line " \ + "markInputLine __str__ __repr__".split() + +class ParseException(ParseBaseException): + """exception thrown when parse expressions don't match class; + supported attributes by name are: + - lineno - returns the line number of the exception text + - col - returns the column number of the exception text + - line - returns the line containing the exception text + """ + pass + +class ParseFatalException(ParseBaseException): + """user-throwable exception thrown when inconsistent parse content + is found; stops all parsing immediately""" + pass + +class ParseSyntaxException(ParseFatalException): + """just like C{ParseFatalException}, but thrown internally when an + C{ErrorStop} ('-' operator) indicates that parsing is to stop immediately because + an unbacktrackable syntax error has been found""" + def __init__(self, pe): + super(ParseSyntaxException, self).__init__( + pe.pstr, pe.loc, pe.msg, pe.parserElement) + +#~ class ReparseException(ParseBaseException): + #~ """Experimental class - parse actions can raise this exception to cause + #~ pyparsing to reparse the input string: + #~ - with a modified input string, and/or + #~ - with a modified start location + #~ Set the values of the ReparseException in the constructor, and raise the + #~ exception in a parse action to cause pyparsing to use the new string/location. + #~ Setting the values as None causes no change to be made. + #~ """ + #~ def __init_( self, newstring, restartLoc ): + #~ self.newParseText = newstring + #~ self.reparseLoc = restartLoc + +class RecursiveGrammarException(Exception): + """exception thrown by C{validate()} if the grammar could be improperly recursive""" + def __init__( self, parseElementList ): + self.parseElementTrace = parseElementList + + def __str__( self ): + return "RecursiveGrammarException: %s" % self.parseElementTrace + +class _ParseResultsWithOffset(object): + def __init__(self,p1,p2): + self.tup = (p1,p2) + def __getitem__(self,i): + return self.tup[i] + def __repr__(self): + return repr(self.tup) + def setOffset(self,i): + self.tup = (self.tup[0],i) + +class ParseResults(object): + """Structured parse results, to provide multiple means of access to the parsed data: + - as a list (C{len(results)}) + - by list index (C{results[0], results[1]}, etc.) 
+ - by attribute (C{results.}) + """ + #~ __slots__ = ( "__toklist", "__tokdict", "__doinit", "__name", "__parent", "__accumNames", "__weakref__" ) + def __new__(cls, toklist, name=None, asList=True, modal=True ): + if isinstance(toklist, cls): + return toklist + retobj = object.__new__(cls) + retobj.__doinit = True + return retobj + + # Performance tuning: we construct a *lot* of these, so keep this + # constructor as small and fast as possible + def __init__( self, toklist, name=None, asList=True, modal=True, isinstance=isinstance ): + if self.__doinit: + self.__doinit = False + self.__name = None + self.__parent = None + self.__accumNames = {} + if isinstance(toklist, list): + self.__toklist = toklist[:] + else: + self.__toklist = [toklist] + self.__tokdict = dict() + + if name is not None and name: + if not modal: + self.__accumNames[name] = 0 + if isinstance(name,int): + name = _ustr(name) # will always return a str, but use _ustr for consistency + self.__name = name + if not toklist in (None,'',[]): + if isinstance(toklist,basestring): + toklist = [ toklist ] + if asList: + if isinstance(toklist,ParseResults): + self[name] = _ParseResultsWithOffset(toklist.copy(),0) + else: + self[name] = _ParseResultsWithOffset(ParseResults(toklist[0]),0) + self[name].__name = name + else: + try: + self[name] = toklist[0] + except (KeyError,TypeError,IndexError): + self[name] = toklist + + def __getitem__( self, i ): + if isinstance( i, (int,slice) ): + return self.__toklist[i] + else: + if i not in self.__accumNames: + return self.__tokdict[i][-1][0] + else: + return ParseResults([ v[0] for v in self.__tokdict[i] ]) + + def __setitem__( self, k, v, isinstance=isinstance ): + if isinstance(v,_ParseResultsWithOffset): + self.__tokdict[k] = self.__tokdict.get(k,list()) + [v] + sub = v[0] + elif isinstance(k,int): + self.__toklist[k] = v + sub = v + else: + self.__tokdict[k] = self.__tokdict.get(k,list()) + [_ParseResultsWithOffset(v,0)] + sub = v + if isinstance(sub,ParseResults): + sub.__parent = wkref(self) + + def __delitem__( self, i ): + if isinstance(i,(int,slice)): + mylen = len( self.__toklist ) + del self.__toklist[i] + + # convert int to slice + if isinstance(i, int): + if i < 0: + i += mylen + i = slice(i, i+1) + # get removed indices + removed = list(range(*i.indices(mylen))) + removed.reverse() + # fixup indices in token dictionary + for name in self.__tokdict: + occurrences = self.__tokdict[name] + for j in removed: + for k, (value, position) in enumerate(occurrences): + occurrences[k] = _ParseResultsWithOffset(value, position - (position > j)) + else: + del self.__tokdict[i] + + def __contains__( self, k ): + return k in self.__tokdict + + def __len__( self ): return len( self.__toklist ) + def __bool__(self): return len( self.__toklist ) > 0 + __nonzero__ = __bool__ + def __iter__( self ): return iter( self.__toklist ) + def __reversed__( self ): return iter( self.__toklist[::-1] ) + def keys( self ): + """Returns all named result keys.""" + return self.__tokdict.keys() + + def pop( self, index=-1 ): + """Removes and returns item at specified index (default=last). 
+ Will work with either numeric indices or dict-key indicies.""" + ret = self[index] + del self[index] + return ret + + def get(self, key, defaultValue=None): + """Returns named result matching the given key, or if there is no + such name, then returns the given C{defaultValue} or C{None} if no + C{defaultValue} is specified.""" + if key in self: + return self[key] + else: + return defaultValue + + def insert( self, index, insStr ): + """Inserts new element at location index in the list of parsed tokens.""" + self.__toklist.insert(index, insStr) + # fixup indices in token dictionary + for name in self.__tokdict: + occurrences = self.__tokdict[name] + for k, (value, position) in enumerate(occurrences): + occurrences[k] = _ParseResultsWithOffset(value, position + (position > index)) + + def items( self ): + """Returns all named result keys and values as a list of tuples.""" + return [(k,self[k]) for k in self.__tokdict] + + def values( self ): + """Returns all named result values.""" + return [ v[-1][0] for v in self.__tokdict.values() ] + + def __getattr__( self, name ): + if True: #name not in self.__slots__: + if name in self.__tokdict: + if name not in self.__accumNames: + return self.__tokdict[name][-1][0] + else: + return ParseResults([ v[0] for v in self.__tokdict[name] ]) + else: + return "" + return None + + def __add__( self, other ): + ret = self.copy() + ret += other + return ret + + def __iadd__( self, other ): + if other.__tokdict: + offset = len(self.__toklist) + addoffset = ( lambda a: (a<0 and offset) or (a+offset) ) + otheritems = other.__tokdict.items() + otherdictitems = [(k, _ParseResultsWithOffset(v[0],addoffset(v[1])) ) + for (k,vlist) in otheritems for v in vlist] + for k,v in otherdictitems: + self[k] = v + if isinstance(v[0],ParseResults): + v[0].__parent = wkref(self) + + self.__toklist += other.__toklist + self.__accumNames.update( other.__accumNames ) + return self + + def __radd__(self, other): + if isinstance(other,int) and other == 0: + return self.copy() + + def __repr__( self ): + return "(%s, %s)" % ( repr( self.__toklist ), repr( self.__tokdict ) ) + + def __str__( self ): + out = "[" + sep = "" + for i in self.__toklist: + if isinstance(i, ParseResults): + out += sep + _ustr(i) + else: + out += sep + repr(i) + sep = ", " + out += "]" + return out + + def _asStringList( self, sep='' ): + out = [] + for item in self.__toklist: + if out and sep: + out.append(sep) + if isinstance( item, ParseResults ): + out += item._asStringList() + else: + out.append( _ustr(item) ) + return out + + def asList( self ): + """Returns the parse results as a nested list of matching tokens, all converted to strings.""" + out = [] + for res in self.__toklist: + if isinstance(res,ParseResults): + out.append( res.asList() ) + else: + out.append( res ) + return out + + def asDict( self ): + """Returns the named parse results as dictionary.""" + return dict( self.items() ) + + def copy( self ): + """Returns a new copy of a C{ParseResults} object.""" + ret = ParseResults( self.__toklist ) + ret.__tokdict = self.__tokdict.copy() + ret.__parent = self.__parent + ret.__accumNames.update( self.__accumNames ) + ret.__name = self.__name + return ret + + def asXML( self, doctag=None, namedItemsOnly=False, indent="", formatted=True ): + """Returns the parse results as XML. 
Tags are created for tokens and lists that have defined results names.""" + nl = "\n" + out = [] + namedItems = dict( [ (v[1],k) for (k,vlist) in self.__tokdict.items() + for v in vlist ] ) + nextLevelIndent = indent + " " + + # collapse out indents if formatting is not desired + if not formatted: + indent = "" + nextLevelIndent = "" + nl = "" + + selfTag = None + if doctag is not None: + selfTag = doctag + else: + if self.__name: + selfTag = self.__name + + if not selfTag: + if namedItemsOnly: + return "" + else: + selfTag = "ITEM" + + out += [ nl, indent, "<", selfTag, ">" ] + + worklist = self.__toklist + for i,res in enumerate(worklist): + if isinstance(res,ParseResults): + if i in namedItems: + out += [ res.asXML(namedItems[i], + namedItemsOnly and doctag is None, + nextLevelIndent, + formatted)] + else: + out += [ res.asXML(None, + namedItemsOnly and doctag is None, + nextLevelIndent, + formatted)] + else: + # individual token, see if there is a name for it + resTag = None + if i in namedItems: + resTag = namedItems[i] + if not resTag: + if namedItemsOnly: + continue + else: + resTag = "ITEM" + xmlBodyText = _xml_escape(_ustr(res)) + out += [ nl, nextLevelIndent, "<", resTag, ">", + xmlBodyText, + "" ] + + out += [ nl, indent, "" ] + return "".join(out) + + def __lookup(self,sub): + for k,vlist in self.__tokdict.items(): + for v,loc in vlist: + if sub is v: + return k + return None + + def getName(self): + """Returns the results name for this token expression.""" + if self.__name: + return self.__name + elif self.__parent: + par = self.__parent() + if par: + return par.__lookup(self) + else: + return None + elif (len(self) == 1 and + len(self.__tokdict) == 1 and + self.__tokdict.values()[0][0][1] in (0,-1)): + return self.__tokdict.keys()[0] + else: + return None + + def dump(self,indent='',depth=0): + """Diagnostic method for listing out the contents of a C{ParseResults}. + Accepts an optional C{indent} argument so that this string can be embedded + in a nested display of other data.""" + out = [] + out.append( indent+_ustr(self.asList()) ) + keys = self.items() + keys.sort() + for k,v in keys: + if out: + out.append('\n') + out.append( "%s%s- %s: " % (indent,(' '*depth), k) ) + if isinstance(v,ParseResults): + if v.keys(): + out.append( v.dump(indent,depth+1) ) + else: + out.append(_ustr(v)) + else: + out.append(_ustr(v)) + return "".join(out) + + # add support for pickle protocol + def __getstate__(self): + return ( self.__toklist, + ( self.__tokdict.copy(), + self.__parent is not None and self.__parent() or None, + self.__accumNames, + self.__name ) ) + + def __setstate__(self,state): + self.__toklist = state[0] + self.__tokdict, \ + par, \ + inAccumNames, \ + self.__name = state[1] + self.__accumNames = {} + self.__accumNames.update(inAccumNames) + if par is not None: + self.__parent = wkref(par) + else: + self.__parent = None + + def __dir__(self): + return dir(super(ParseResults,self)) + self.keys() + +def col (loc,strg): + """Returns current column within a string, counting newlines as line separators. + The first column is number 1. + + Note: the default parsing behavior is to expand tabs in the input string + before starting the parsing process. See L{I{ParserElement.parseString}} for more information + on parsing strings containing s, and suggested methods to maintain a + consistent view of the parsed string, the parse location, and line and column + positions within the parsed string. 
+ """ + return (loc} for more information + on parsing strings containing s, and suggested methods to maintain a + consistent view of the parsed string, the parse location, and line and column + positions within the parsed string. + """ + return strg.count("\n",0,loc) + 1 + +def line( loc, strg ): + """Returns the line of text containing loc within a string, counting newlines as line separators. + """ + lastCR = strg.rfind("\n", 0, loc) + nextCR = strg.find("\n", loc) + if nextCR >= 0: + return strg[lastCR+1:nextCR] + else: + return strg[lastCR+1:] + +def _defaultStartDebugAction( instring, loc, expr ): + print ("Match " + _ustr(expr) + " at loc " + _ustr(loc) + "(%d,%d)" % ( lineno(loc,instring), col(loc,instring) )) + +def _defaultSuccessDebugAction( instring, startloc, endloc, expr, toks ): + print ("Matched " + _ustr(expr) + " -> " + str(toks.asList())) + +def _defaultExceptionDebugAction( instring, loc, expr, exc ): + print ("Exception raised:" + _ustr(exc)) + +def nullDebugAction(*args): + """'Do-nothing' debug action, to suppress debugging output during parsing.""" + pass + +'decorator to trim function calls to match the arity of the target' +if not _PY3K: + def _trim_arity(func, maxargs=2): + limit = [0] + def wrapper(*args): + while 1: + try: + return func(*args[limit[0]:]) + except TypeError: + if limit[0] <= maxargs: + limit[0] += 1 + continue + raise + return wrapper +else: + def _trim_arity(func, maxargs=2): + limit = maxargs + def wrapper(*args): + #~ nonlocal limit + while 1: + try: + return func(*args[limit:]) + except TypeError: + if limit: + limit -= 1 + continue + raise + return wrapper + +class ParserElement(object): + """Abstract base level parser element class.""" + DEFAULT_WHITE_CHARS = " \n\t\r" + verbose_stacktrace = False + + def setDefaultWhitespaceChars( chars ): + """Overrides the default whitespace chars + """ + ParserElement.DEFAULT_WHITE_CHARS = chars + setDefaultWhitespaceChars = staticmethod(setDefaultWhitespaceChars) + + def __init__( self, savelist=False ): + self.parseAction = list() + self.failAction = None + #~ self.name = "" # don't define self.name, let subclasses try/except upcall + self.strRepr = None + self.resultsName = None + self.saveAsList = savelist + self.skipWhitespace = True + self.whiteChars = ParserElement.DEFAULT_WHITE_CHARS + self.copyDefaultWhiteChars = True + self.mayReturnEmpty = False # used when checking for left-recursion + self.keepTabs = False + self.ignoreExprs = list() + self.debug = False + self.streamlined = False + self.mayIndexError = True # used to optimize exception handling for subclasses that don't advance parse index + self.errmsg = "" + self.modalResults = True # used to mark results names as modal (report only last) or cumulative (list all) + self.debugActions = ( None, None, None ) #custom debug actions + self.re = None + self.callPreparse = True # used to avoid redundant calls to preParse + self.callDuringTry = False + + def copy( self ): + """Make a copy of this C{ParserElement}. 
Useful for defining different parse actions + for the same parsing pattern, using copies of the original parse element.""" + cpy = copy.copy( self ) + cpy.parseAction = self.parseAction[:] + cpy.ignoreExprs = self.ignoreExprs[:] + if self.copyDefaultWhiteChars: + cpy.whiteChars = ParserElement.DEFAULT_WHITE_CHARS + return cpy + + def setName( self, name ): + """Define name for this expression, for use in debugging.""" + self.name = name + self.errmsg = "Expected " + self.name + if hasattr(self,"exception"): + self.exception.msg = self.errmsg + return self + + def setResultsName( self, name, listAllMatches=False ): + """Define name for referencing matching tokens as a nested attribute + of the returned parse results. + NOTE: this returns a *copy* of the original C{ParserElement} object; + this is so that the client can define a basic element, such as an + integer, and reference it in multiple places with different names. + + You can also set results names using the abbreviated syntax, + C{expr("name")} in place of C{expr.setResultsName("name")} - + see L{I{__call__}<__call__>}. + """ + newself = self.copy() + newself.resultsName = name + newself.modalResults = not listAllMatches + return newself + + def setBreak(self,breakFlag = True): + """Method to invoke the Python pdb debugger when this element is + about to be parsed. Set C{breakFlag} to True to enable, False to + disable. + """ + if breakFlag: + _parseMethod = self._parse + def breaker(instring, loc, doActions=True, callPreParse=True): + import pdb + pdb.set_trace() + return _parseMethod( instring, loc, doActions, callPreParse ) + breaker._originalParseMethod = _parseMethod + self._parse = breaker + else: + if hasattr(self._parse,"_originalParseMethod"): + self._parse = self._parse._originalParseMethod + return self + + def setParseAction( self, *fns, **kwargs ): + """Define action to perform when successfully matching parse element definition. + Parse action fn is a callable method with 0-3 arguments, called as C{fn(s,loc,toks)}, + C{fn(loc,toks)}, C{fn(toks)}, or just C{fn()}, where: + - s = the original string being parsed (see note below) + - loc = the location of the matching substring + - toks = a list of the matched tokens, packaged as a ParseResults object + If the functions in fns modify the tokens, they can return them as the return + value from fn, and the modified list of tokens will replace the original. + Otherwise, fn does not need to return any value. + + Note: the default parsing behavior is to expand tabs in the input string + before starting the parsing process. See L{I{parseString}} for more information + on parsing strings containing s, and suggested methods to maintain a + consistent view of the parsed string, the parse location, and line and column + positions within the parsed string. + """ + self.parseAction = list(map(_trim_arity, list(fns))) + self.callDuringTry = ("callDuringTry" in kwargs and kwargs["callDuringTry"]) + return self + + def addParseAction( self, *fns, **kwargs ): + """Add parse action to expression's list of parse actions. See L{I{setParseAction}}.""" + self.parseAction += list(map(_trim_arity, list(fns))) + self.callDuringTry = self.callDuringTry or ("callDuringTry" in kwargs and kwargs["callDuringTry"]) + return self + + def setFailAction( self, fn ): + """Define action to perform if parsing fails at this expression. 
+ Fail acton fn is a callable function that takes the arguments + C{fn(s,loc,expr,err)} where: + - s = string being parsed + - loc = location where expression match was attempted and failed + - expr = the parse expression that failed + - err = the exception thrown + The function returns no value. It may throw C{ParseFatalException} + if it is desired to stop parsing immediately.""" + self.failAction = fn + return self + + def _skipIgnorables( self, instring, loc ): + exprsFound = True + while exprsFound: + exprsFound = False + for e in self.ignoreExprs: + try: + while 1: + loc,dummy = e._parse( instring, loc ) + exprsFound = True + except ParseException: + pass + return loc + + def preParse( self, instring, loc ): + if self.ignoreExprs: + loc = self._skipIgnorables( instring, loc ) + + if self.skipWhitespace: + wt = self.whiteChars + instrlen = len(instring) + while loc < instrlen and instring[loc] in wt: + loc += 1 + + return loc + + def parseImpl( self, instring, loc, doActions=True ): + return loc, [] + + def postParse( self, instring, loc, tokenlist ): + return tokenlist + + #~ @profile + def _parseNoCache( self, instring, loc, doActions=True, callPreParse=True ): + debugging = ( self.debug ) #and doActions ) + + if debugging or self.failAction: + #~ print ("Match",self,"at loc",loc,"(%d,%d)" % ( lineno(loc,instring), col(loc,instring) )) + if (self.debugActions[0] ): + self.debugActions[0]( instring, loc, self ) + if callPreParse and self.callPreparse: + preloc = self.preParse( instring, loc ) + else: + preloc = loc + tokensStart = preloc + try: + try: + loc,tokens = self.parseImpl( instring, preloc, doActions ) + except IndexError: + raise ParseException( instring, len(instring), self.errmsg, self ) + except ParseBaseException: + #~ print ("Exception raised:", err) + err = None + if self.debugActions[2]: + err = sys.exc_info()[1] + self.debugActions[2]( instring, tokensStart, self, err ) + if self.failAction: + if err is None: + err = sys.exc_info()[1] + self.failAction( instring, tokensStart, self, err ) + raise + else: + if callPreParse and self.callPreparse: + preloc = self.preParse( instring, loc ) + else: + preloc = loc + tokensStart = preloc + if self.mayIndexError or loc >= len(instring): + try: + loc,tokens = self.parseImpl( instring, preloc, doActions ) + except IndexError: + raise ParseException( instring, len(instring), self.errmsg, self ) + else: + loc,tokens = self.parseImpl( instring, preloc, doActions ) + + tokens = self.postParse( instring, loc, tokens ) + + retTokens = ParseResults( tokens, self.resultsName, asList=self.saveAsList, modal=self.modalResults ) + if self.parseAction and (doActions or self.callDuringTry): + if debugging: + try: + for fn in self.parseAction: + tokens = fn( instring, tokensStart, retTokens ) + if tokens is not None: + retTokens = ParseResults( tokens, + self.resultsName, + asList=self.saveAsList and isinstance(tokens,(ParseResults,list)), + modal=self.modalResults ) + except ParseBaseException: + #~ print "Exception raised in user parse action:", err + if (self.debugActions[2] ): + err = sys.exc_info()[1] + self.debugActions[2]( instring, tokensStart, self, err ) + raise + else: + for fn in self.parseAction: + tokens = fn( instring, tokensStart, retTokens ) + if tokens is not None: + retTokens = ParseResults( tokens, + self.resultsName, + asList=self.saveAsList and isinstance(tokens,(ParseResults,list)), + modal=self.modalResults ) + + if debugging: + #~ print ("Matched",self,"->",retTokens.asList()) + if (self.debugActions[1] ): + 
self.debugActions[1]( instring, tokensStart, loc, self, retTokens ) + + return loc, retTokens + + def tryParse( self, instring, loc ): + try: + return self._parse( instring, loc, doActions=False )[0] + except ParseFatalException: + raise ParseException( instring, loc, self.errmsg, self) + + # this method gets repeatedly called during backtracking with the same arguments - + # we can cache these arguments and save ourselves the trouble of re-parsing the contained expression + def _parseCache( self, instring, loc, doActions=True, callPreParse=True ): + lookup = (self,instring,loc,callPreParse,doActions) + if lookup in ParserElement._exprArgCache: + value = ParserElement._exprArgCache[ lookup ] + if isinstance(value, Exception): + raise value + return (value[0],value[1].copy()) + else: + try: + value = self._parseNoCache( instring, loc, doActions, callPreParse ) + ParserElement._exprArgCache[ lookup ] = (value[0],value[1].copy()) + return value + except ParseBaseException: + pe = sys.exc_info()[1] + ParserElement._exprArgCache[ lookup ] = pe + raise + + _parse = _parseNoCache + + # argument cache for optimizing repeated calls when backtracking through recursive expressions + _exprArgCache = {} + def resetCache(): + ParserElement._exprArgCache.clear() + resetCache = staticmethod(resetCache) + + _packratEnabled = False + def enablePackrat(): + """Enables "packrat" parsing, which adds memoizing to the parsing logic. + Repeated parse attempts at the same string location (which happens + often in many complex grammars) can immediately return a cached value, + instead of re-executing parsing/validating code. Memoizing is done of + both valid results and parsing exceptions. + + This speedup may break existing programs that use parse actions that + have side-effects. For this reason, packrat parsing is disabled when + you first import pyparsing. To activate the packrat feature, your + program must call the class method C{ParserElement.enablePackrat()}. If + your program uses C{psyco} to "compile as you go", you must call + C{enablePackrat} before calling C{psyco.full()}. If you do not do this, + Python will crash. For best results, call C{enablePackrat()} immediately + after importing pyparsing. + """ + if not ParserElement._packratEnabled: + ParserElement._packratEnabled = True + ParserElement._parse = ParserElement._parseCache + enablePackrat = staticmethod(enablePackrat) + + def parseString( self, instring, parseAll=False ): + """Execute the parse expression with the given string. + This is the main interface to the client code, once the complete + expression has been built. + + If you want the grammar to require that the entire input string be + successfully parsed, then set C{parseAll} to True (equivalent to ending + the grammar with C{StringEnd()}). + + Note: C{parseString} implicitly calls C{expandtabs()} on the input string, + in order to report proper column numbers in parse actions. 
+ If the input string contains tabs and + the grammar uses parse actions that use the C{loc} argument to index into the + string being parsed, you can ensure you have a consistent view of the input + string by: + - calling C{parseWithTabs} on your grammar before calling C{parseString} + (see L{I{parseWithTabs}}) + - define your parse action using the full C{(s,loc,toks)} signature, and + reference the input string using the parse action's C{s} argument + - explictly expand the tabs in your input string before calling + C{parseString} + """ + ParserElement.resetCache() + if not self.streamlined: + self.streamline() + #~ self.saveAsList = True + for e in self.ignoreExprs: + e.streamline() + if not self.keepTabs: + instring = instring.expandtabs() + try: + loc, tokens = self._parse( instring, 0 ) + if parseAll: + loc = self.preParse( instring, loc ) + se = Empty() + StringEnd() + se._parse( instring, loc ) + except ParseBaseException: + if ParserElement.verbose_stacktrace: + raise + else: + # catch and re-raise exception from here, clears out pyparsing internal stack trace + exc = sys.exc_info()[1] + raise exc + else: + return tokens + + def scanString( self, instring, maxMatches=_MAX_INT, overlap=False ): + """Scan the input string for expression matches. Each match will return the + matching tokens, start location, and end location. May be called with optional + C{maxMatches} argument, to clip scanning after 'n' matches are found. If + C{overlap} is specified, then overlapping matches will be reported. + + Note that the start and end locations are reported relative to the string + being parsed. See L{I{parseString}} for more information on parsing + strings with embedded tabs.""" + if not self.streamlined: + self.streamline() + for e in self.ignoreExprs: + e.streamline() + + if not self.keepTabs: + instring = _ustr(instring).expandtabs() + instrlen = len(instring) + loc = 0 + preparseFn = self.preParse + parseFn = self._parse + ParserElement.resetCache() + matches = 0 + try: + while loc <= instrlen and matches < maxMatches: + try: + preloc = preparseFn( instring, loc ) + nextLoc,tokens = parseFn( instring, preloc, callPreParse=False ) + except ParseException: + loc = preloc+1 + else: + if nextLoc > loc: + matches += 1 + yield tokens, preloc, nextLoc + if overlap: + nextloc = preparseFn( instring, loc ) + if nextloc > loc: + loc = nextLoc + else: + loc += 1 + else: + loc = nextLoc + else: + loc = preloc+1 + except ParseBaseException: + if ParserElement.verbose_stacktrace: + raise + else: + # catch and re-raise exception from here, clears out pyparsing internal stack trace + exc = sys.exc_info()[1] + raise exc + + def transformString( self, instring ): + """Extension to C{scanString}, to modify matching text with modified tokens that may + be returned from a parse action. To use C{transformString}, define a grammar and + attach a parse action to it that modifies the returned token list. + Invoking C{transformString()} on a target string will then scan for matches, + and replace the matched text patterns according to the logic in the parse + action. 
C{transformString()} returns the resulting transformed string.""" + out = [] + lastE = 0 + # force preservation of s, to minimize unwanted transformation of string, and to + # keep string locs straight between transformString and scanString + self.keepTabs = True + try: + for t,s,e in self.scanString( instring ): + out.append( instring[lastE:s] ) + if t: + if isinstance(t,ParseResults): + out += t.asList() + elif isinstance(t,list): + out += t + else: + out.append(t) + lastE = e + out.append(instring[lastE:]) + out = [o for o in out if o] + return "".join(map(_ustr,_flatten(out))) + except ParseBaseException: + if ParserElement.verbose_stacktrace: + raise + else: + # catch and re-raise exception from here, clears out pyparsing internal stack trace + exc = sys.exc_info()[1] + raise exc + + def searchString( self, instring, maxMatches=_MAX_INT ): + """Another extension to C{scanString}, simplifying the access to the tokens found + to match the given parse expression. May be called with optional + C{maxMatches} argument, to clip searching after 'n' matches are found. + """ + try: + return ParseResults([ t for t,s,e in self.scanString( instring, maxMatches ) ]) + except ParseBaseException: + if ParserElement.verbose_stacktrace: + raise + else: + # catch and re-raise exception from here, clears out pyparsing internal stack trace + exc = sys.exc_info()[1] + raise exc + + def __add__(self, other ): + """Implementation of + operator - returns And""" + if isinstance( other, basestring ): + other = Literal( other ) + if not isinstance( other, ParserElement ): + warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), + SyntaxWarning, stacklevel=2) + return None + return And( [ self, other ] ) + + def __radd__(self, other ): + """Implementation of + operator when left operand is not a C{ParserElement}""" + if isinstance( other, basestring ): + other = Literal( other ) + if not isinstance( other, ParserElement ): + warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), + SyntaxWarning, stacklevel=2) + return None + return other + self + + def __sub__(self, other): + """Implementation of - operator, returns C{And} with error stop""" + if isinstance( other, basestring ): + other = Literal( other ) + if not isinstance( other, ParserElement ): + warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), + SyntaxWarning, stacklevel=2) + return None + return And( [ self, And._ErrorStop(), other ] ) + + def __rsub__(self, other ): + """Implementation of - operator when left operand is not a C{ParserElement}""" + if isinstance( other, basestring ): + other = Literal( other ) + if not isinstance( other, ParserElement ): + warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), + SyntaxWarning, stacklevel=2) + return None + return other - self + + def __mul__(self,other): + """Implementation of * operator, allows use of C{expr * 3} in place of + C{expr + expr + expr}. Expressions may also me multiplied by a 2-integer + tuple, similar to C{{min,max}} multipliers in regular expressions. 
Tuples + may also include C{None} as in: + - C{expr*(n,None)} or C{expr*(n,)} is equivalent + to C{expr*n + ZeroOrMore(expr)} + (read as "at least n instances of C{expr}") + - C{expr*(None,n)} is equivalent to C{expr*(0,n)} + (read as "0 to n instances of C{expr}") + - C{expr*(None,None)} is equivalent to C{ZeroOrMore(expr)} + - C{expr*(1,None)} is equivalent to C{OneOrMore(expr)} + + Note that C{expr*(None,n)} does not raise an exception if + more than n exprs exist in the input stream; that is, + C{expr*(None,n)} does not enforce a maximum number of expr + occurrences. If this behavior is desired, then write + C{expr*(None,n) + ~expr} + + """ + if isinstance(other,int): + minElements, optElements = other,0 + elif isinstance(other,tuple): + other = (other + (None, None))[:2] + if other[0] is None: + other = (0, other[1]) + if isinstance(other[0],int) and other[1] is None: + if other[0] == 0: + return ZeroOrMore(self) + if other[0] == 1: + return OneOrMore(self) + else: + return self*other[0] + ZeroOrMore(self) + elif isinstance(other[0],int) and isinstance(other[1],int): + minElements, optElements = other + optElements -= minElements + else: + raise TypeError("cannot multiply 'ParserElement' and ('%s','%s') objects", type(other[0]),type(other[1])) + else: + raise TypeError("cannot multiply 'ParserElement' and '%s' objects", type(other)) + + if minElements < 0: + raise ValueError("cannot multiply ParserElement by negative value") + if optElements < 0: + raise ValueError("second tuple value must be greater or equal to first tuple value") + if minElements == optElements == 0: + raise ValueError("cannot multiply ParserElement by 0 or (0,0)") + + if (optElements): + def makeOptionalList(n): + if n>1: + return Optional(self + makeOptionalList(n-1)) + else: + return Optional(self) + if minElements: + if minElements == 1: + ret = self + makeOptionalList(optElements) + else: + ret = And([self]*minElements) + makeOptionalList(optElements) + else: + ret = makeOptionalList(optElements) + else: + if minElements == 1: + ret = self + else: + ret = And([self]*minElements) + return ret + + def __rmul__(self, other): + return self.__mul__(other) + + def __or__(self, other ): + """Implementation of | operator - returns C{MatchFirst}""" + if isinstance( other, basestring ): + other = Literal( other ) + if not isinstance( other, ParserElement ): + warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), + SyntaxWarning, stacklevel=2) + return None + return MatchFirst( [ self, other ] ) + + def __ror__(self, other ): + """Implementation of | operator when left operand is not a C{ParserElement}""" + if isinstance( other, basestring ): + other = Literal( other ) + if not isinstance( other, ParserElement ): + warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), + SyntaxWarning, stacklevel=2) + return None + return other | self + + def __xor__(self, other ): + """Implementation of ^ operator - returns C{Or}""" + if isinstance( other, basestring ): + other = Literal( other ) + if not isinstance( other, ParserElement ): + warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), + SyntaxWarning, stacklevel=2) + return None + return Or( [ self, other ] ) + + def __rxor__(self, other ): + """Implementation of ^ operator when left operand is not a C{ParserElement}""" + if isinstance( other, basestring ): + other = Literal( other ) + if not isinstance( other, ParserElement ): + warnings.warn("Cannot combine element of type %s with 
ParserElement" % type(other), + SyntaxWarning, stacklevel=2) + return None + return other ^ self + + def __and__(self, other ): + """Implementation of & operator - returns C{Each}""" + if isinstance( other, basestring ): + other = Literal( other ) + if not isinstance( other, ParserElement ): + warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), + SyntaxWarning, stacklevel=2) + return None + return Each( [ self, other ] ) + + def __rand__(self, other ): + """Implementation of & operator when left operand is not a C{ParserElement}""" + if isinstance( other, basestring ): + other = Literal( other ) + if not isinstance( other, ParserElement ): + warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), + SyntaxWarning, stacklevel=2) + return None + return other & self + + def __invert__( self ): + """Implementation of ~ operator - returns C{NotAny}""" + return NotAny( self ) + + def __call__(self, name): + """Shortcut for C{setResultsName}, with C{listAllMatches=default}:: + userdata = Word(alphas).setResultsName("name") + Word(nums+"-").setResultsName("socsecno") + could be written as:: + userdata = Word(alphas)("name") + Word(nums+"-")("socsecno") + + If C{name} is given with a trailing C{'*'} character, then C{listAllMatches} will be + passed as C{True}. + """ + if not name.endswith("*"): + return self.setResultsName(name) + else: + return self.setResultsName(name[:-1], listAllMatches=True) + + def suppress( self ): + """Suppresses the output of this C{ParserElement}; useful to keep punctuation from + cluttering up returned output. + """ + return Suppress( self ) + + def leaveWhitespace( self ): + """Disables the skipping of whitespace before matching the characters in the + C{ParserElement}'s defined pattern. This is normally only used internally by + the pyparsing module, but may be needed in some whitespace-sensitive grammars. + """ + self.skipWhitespace = False + return self + + def setWhitespaceChars( self, chars ): + """Overrides the default whitespace chars + """ + self.skipWhitespace = True + self.whiteChars = chars + self.copyDefaultWhiteChars = False + return self + + def parseWithTabs( self ): + """Overrides default behavior to expand C{}s to spaces before parsing the input string. + Must be called before C{parseString} when the input grammar contains elements that + match C{} characters.""" + self.keepTabs = True + return self + + def ignore( self, other ): + """Define expression to be ignored (e.g., comments) while doing pattern + matching; may be called repeatedly, to define multiple comment or other + ignorable patterns. + """ + if isinstance( other, Suppress ): + if other not in self.ignoreExprs: + self.ignoreExprs.append( other.copy() ) + else: + self.ignoreExprs.append( Suppress( other.copy() ) ) + return self + + def setDebugActions( self, startAction, successAction, exceptionAction ): + """Enable display of debugging messages while doing pattern matching.""" + self.debugActions = (startAction or _defaultStartDebugAction, + successAction or _defaultSuccessDebugAction, + exceptionAction or _defaultExceptionDebugAction) + self.debug = True + return self + + def setDebug( self, flag=True ): + """Enable display of debugging messages while doing pattern matching. 
+ Set C{flag} to True to enable, False to disable.""" + if flag: + self.setDebugActions( _defaultStartDebugAction, _defaultSuccessDebugAction, _defaultExceptionDebugAction ) + else: + self.debug = False + return self + + def __str__( self ): + return self.name + + def __repr__( self ): + return _ustr(self) + + def streamline( self ): + self.streamlined = True + self.strRepr = None + return self + + def checkRecursion( self, parseElementList ): + pass + + def validate( self, validateTrace=[] ): + """Check defined expressions for valid structure, check for infinite recursive definitions.""" + self.checkRecursion( [] ) + + def parseFile( self, file_or_filename, parseAll=False ): + """Execute the parse expression on the given file or filename. + If a filename is specified (instead of a file object), + the entire file is opened, read, and closed before parsing. + """ + try: + file_contents = file_or_filename.read() + except AttributeError: + f = open(file_or_filename, "rb") + file_contents = f.read() + f.close() + try: + return self.parseString(file_contents, parseAll) + except ParseBaseException: + # catch and re-raise exception from here, clears out pyparsing internal stack trace + exc = sys.exc_info()[1] + raise exc + + def getException(self): + return ParseException("",0,self.errmsg,self) + + def __getattr__(self,aname): + if aname == "myException": + self.myException = ret = self.getException(); + return ret; + else: + raise AttributeError("no such attribute " + aname) + + def __eq__(self,other): + if isinstance(other, ParserElement): + return self is other or self.__dict__ == other.__dict__ + elif isinstance(other, basestring): + try: + self.parseString(_ustr(other), parseAll=True) + return True + except ParseBaseException: + return False + else: + return super(ParserElement,self)==other + + def __ne__(self,other): + return not (self == other) + + def __hash__(self): + return hash(id(self)) + + def __req__(self,other): + return self == other + + def __rne__(self,other): + return not (self == other) + + +class Token(ParserElement): + """Abstract C{ParserElement} subclass, for defining atomic matching patterns.""" + def __init__( self ): + super(Token,self).__init__( savelist=False ) + #self.myException = ParseException("",0,"",self) + + def setName(self, name): + s = super(Token,self).setName(name) + self.errmsg = "Expected " + self.name + #s.myException.msg = self.errmsg + return s + + +class Empty(Token): + """An empty token, will always match.""" + def __init__( self ): + super(Empty,self).__init__() + self.name = "Empty" + self.mayReturnEmpty = True + self.mayIndexError = False + + +class NoMatch(Token): + """A token that will never match.""" + def __init__( self ): + super(NoMatch,self).__init__() + self.name = "NoMatch" + self.mayReturnEmpty = True + self.mayIndexError = False + self.errmsg = "Unmatchable token" + #self.myException.msg = self.errmsg + + def parseImpl( self, instring, loc, doActions=True ): + exc = self.myException + exc.loc = loc + exc.pstr = instring + raise exc + + +class Literal(Token): + """Token to exactly match a specified string.""" + def __init__( self, matchString ): + super(Literal,self).__init__() + self.match = matchString + self.matchLen = len(matchString) + try: + self.firstMatchChar = matchString[0] + except IndexError: + warnings.warn("null string passed to Literal; use Empty() instead", + SyntaxWarning, stacklevel=2) + self.__class__ = Empty + self.name = '"%s"' % _ustr(self.match) + self.errmsg = "Expected " + self.name + self.mayReturnEmpty = False 
+ #self.myException.msg = self.errmsg + self.mayIndexError = False + + # Performance tuning: this routine gets called a *lot* + # if this is a single character match string and the first character matches, + # short-circuit as quickly as possible, and avoid calling startswith + #~ @profile + def parseImpl( self, instring, loc, doActions=True ): + if (instring[loc] == self.firstMatchChar and + (self.matchLen==1 or instring.startswith(self.match,loc)) ): + return loc+self.matchLen, self.match + #~ raise ParseException( instring, loc, self.errmsg ) + exc = self.myException + exc.loc = loc + exc.pstr = instring + raise exc +_L = Literal + +class Keyword(Token): + """Token to exactly match a specified string as a keyword, that is, it must be + immediately followed by a non-keyword character. Compare with C{Literal}:: + Literal("if") will match the leading C{'if'} in C{'ifAndOnlyIf'}. + Keyword("if") will not; it will only match the leading C{'if'} in C{'if x=1'}, or C{'if(y==2)'} + Accepts two optional constructor arguments in addition to the keyword string: + C{identChars} is a string of characters that would be valid identifier characters, + defaulting to all alphanumerics + "_" and "$"; C{caseless} allows case-insensitive + matching, default is C{False}. + """ + DEFAULT_KEYWORD_CHARS = alphanums+"_$" + + def __init__( self, matchString, identChars=DEFAULT_KEYWORD_CHARS, caseless=False ): + super(Keyword,self).__init__() + self.match = matchString + self.matchLen = len(matchString) + try: + self.firstMatchChar = matchString[0] + except IndexError: + warnings.warn("null string passed to Keyword; use Empty() instead", + SyntaxWarning, stacklevel=2) + self.name = '"%s"' % self.match + self.errmsg = "Expected " + self.name + self.mayReturnEmpty = False + #self.myException.msg = self.errmsg + self.mayIndexError = False + self.caseless = caseless + if caseless: + self.caselessmatch = matchString.upper() + identChars = identChars.upper() + self.identChars = set(identChars) + + def parseImpl( self, instring, loc, doActions=True ): + if self.caseless: + if ( (instring[ loc:loc+self.matchLen ].upper() == self.caselessmatch) and + (loc >= len(instring)-self.matchLen or instring[loc+self.matchLen].upper() not in self.identChars) and + (loc == 0 or instring[loc-1].upper() not in self.identChars) ): + return loc+self.matchLen, self.match + else: + if (instring[loc] == self.firstMatchChar and + (self.matchLen==1 or instring.startswith(self.match,loc)) and + (loc >= len(instring)-self.matchLen or instring[loc+self.matchLen] not in self.identChars) and + (loc == 0 or instring[loc-1] not in self.identChars) ): + return loc+self.matchLen, self.match + #~ raise ParseException( instring, loc, self.errmsg ) + exc = self.myException + exc.loc = loc + exc.pstr = instring + raise exc + + def copy(self): + c = super(Keyword,self).copy() + c.identChars = Keyword.DEFAULT_KEYWORD_CHARS + return c + + def setDefaultKeywordChars( chars ): + """Overrides the default Keyword chars + """ + Keyword.DEFAULT_KEYWORD_CHARS = chars + setDefaultKeywordChars = staticmethod(setDefaultKeywordChars) + +class CaselessLiteral(Literal): + """Token to match a specified string, ignoring case of letters. + Note: the matched results will always be in the case of the given + match string, NOT the case of the input text. + """ + def __init__( self, matchString ): + super(CaselessLiteral,self).__init__( matchString.upper() ) + # Preserve the defining literal. 
+ self.returnString = matchString + self.name = "'%s'" % self.returnString + self.errmsg = "Expected " + self.name + #self.myException.msg = self.errmsg + + def parseImpl( self, instring, loc, doActions=True ): + if instring[ loc:loc+self.matchLen ].upper() == self.match: + return loc+self.matchLen, self.returnString + #~ raise ParseException( instring, loc, self.errmsg ) + exc = self.myException + exc.loc = loc + exc.pstr = instring + raise exc + +class CaselessKeyword(Keyword): + def __init__( self, matchString, identChars=Keyword.DEFAULT_KEYWORD_CHARS ): + super(CaselessKeyword,self).__init__( matchString, identChars, caseless=True ) + + def parseImpl( self, instring, loc, doActions=True ): + if ( (instring[ loc:loc+self.matchLen ].upper() == self.caselessmatch) and + (loc >= len(instring)-self.matchLen or instring[loc+self.matchLen].upper() not in self.identChars) ): + return loc+self.matchLen, self.match + #~ raise ParseException( instring, loc, self.errmsg ) + exc = self.myException + exc.loc = loc + exc.pstr = instring + raise exc + +class Word(Token): + """Token for matching words composed of allowed character sets. + Defined with string containing all allowed initial characters, + an optional string containing allowed body characters (if omitted, + defaults to the initial character set), and an optional minimum, + maximum, and/or exact length. The default value for C{min} is 1 (a + minimum value < 1 is not valid); the default values for C{max} and C{exact} + are 0, meaning no maximum or exact length restriction. + """ + def __init__( self, initChars, bodyChars=None, min=1, max=0, exact=0, asKeyword=False ): + super(Word,self).__init__() + self.initCharsOrig = initChars + self.initChars = set(initChars) + if bodyChars : + self.bodyCharsOrig = bodyChars + self.bodyChars = set(bodyChars) + else: + self.bodyCharsOrig = initChars + self.bodyChars = set(initChars) + + self.maxSpecified = max > 0 + + if min < 1: + raise ValueError("cannot specify a minimum length < 1; use Optional(Word()) if zero-length word is permitted") + + self.minLen = min + + if max > 0: + self.maxLen = max + else: + self.maxLen = _MAX_INT + + if exact > 0: + self.maxLen = exact + self.minLen = exact + + self.name = _ustr(self) + self.errmsg = "Expected " + self.name + #self.myException.msg = self.errmsg + self.mayIndexError = False + self.asKeyword = asKeyword + + if ' ' not in self.initCharsOrig+self.bodyCharsOrig and (min==1 and max==0 and exact==0): + if self.bodyCharsOrig == self.initCharsOrig: + self.reString = "[%s]+" % _escapeRegexRangeChars(self.initCharsOrig) + elif len(self.bodyCharsOrig) == 1: + self.reString = "%s[%s]*" % \ + (re.escape(self.initCharsOrig), + _escapeRegexRangeChars(self.bodyCharsOrig),) + else: + self.reString = "[%s][%s]*" % \ + (_escapeRegexRangeChars(self.initCharsOrig), + _escapeRegexRangeChars(self.bodyCharsOrig),) + if self.asKeyword: + self.reString = r"\b"+self.reString+r"\b" + try: + self.re = re.compile( self.reString ) + except: + self.re = None + + def parseImpl( self, instring, loc, doActions=True ): + if self.re: + result = self.re.match(instring,loc) + if not result: + exc = self.myException + exc.loc = loc + exc.pstr = instring + raise exc + + loc = result.end() + return loc, result.group() + + if not(instring[ loc ] in self.initChars): + #~ raise ParseException( instring, loc, self.errmsg ) + exc = self.myException + exc.loc = loc + exc.pstr = instring + raise exc + start = loc + loc += 1 + instrlen = len(instring) + bodychars = self.bodyChars + maxloc = start + 
self.maxLen + maxloc = min( maxloc, instrlen ) + while loc < maxloc and instring[loc] in bodychars: + loc += 1 + + throwException = False + if loc - start < self.minLen: + throwException = True + if self.maxSpecified and loc < instrlen and instring[loc] in bodychars: + throwException = True + if self.asKeyword: + if (start>0 and instring[start-1] in bodychars) or (loc4: + return s[:4]+"..." + else: + return s + + if ( self.initCharsOrig != self.bodyCharsOrig ): + self.strRepr = "W:(%s,%s)" % ( charsAsStr(self.initCharsOrig), charsAsStr(self.bodyCharsOrig) ) + else: + self.strRepr = "W:(%s)" % charsAsStr(self.initCharsOrig) + + return self.strRepr + + +class Regex(Token): + """Token for matching strings that match a given regular expression. + Defined with string specifying the regular expression in a form recognized by the inbuilt Python re module. + """ + compiledREtype = type(re.compile("[A-Z]")) + def __init__( self, pattern, flags=0): + """The parameters C{pattern} and C{flags} are passed to the C{re.compile()} function as-is. See the Python C{re} module for an explanation of the acceptable patterns and flags.""" + super(Regex,self).__init__() + + if isinstance(pattern, basestring): + if len(pattern) == 0: + warnings.warn("null string passed to Regex; use Empty() instead", + SyntaxWarning, stacklevel=2) + + self.pattern = pattern + self.flags = flags + + try: + self.re = re.compile(self.pattern, self.flags) + self.reString = self.pattern + except sre_constants.error: + warnings.warn("invalid pattern (%s) passed to Regex" % pattern, + SyntaxWarning, stacklevel=2) + raise + + elif isinstance(pattern, Regex.compiledREtype): + self.re = pattern + self.pattern = \ + self.reString = str(pattern) + self.flags = flags + + else: + raise ValueError("Regex may only be constructed with a string or a compiled RE object") + + self.name = _ustr(self) + self.errmsg = "Expected " + self.name + #self.myException.msg = self.errmsg + self.mayIndexError = False + self.mayReturnEmpty = True + + def parseImpl( self, instring, loc, doActions=True ): + result = self.re.match(instring,loc) + if not result: + exc = self.myException + exc.loc = loc + exc.pstr = instring + raise exc + + loc = result.end() + d = result.groupdict() + ret = ParseResults(result.group()) + if d: + for k in d: + ret[k] = d[k] + return loc,ret + + def __str__( self ): + try: + return super(Regex,self).__str__() + except: + pass + + if self.strRepr is None: + self.strRepr = "Re:(%s)" % repr(self.pattern) + + return self.strRepr + + +class QuotedString(Token): + """Token for matching strings that are delimited by quoting characters. 
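
    For example, a brief illustrative sketch (the expression names and sample input
    below are invented for this note, and the import assumes this bundled module is
    importable as C{pyparsing})::
        from pyparsing import QuotedString
        sql_string = QuotedString("'", escQuote="''")      # SQL-style string, '' escapes an embedded quote
        sql_string.parseString("'It''s parsed'")           # -> ["It's parsed"]
        html_comment = QuotedString("<!--", endQuoteChar="-->", multiline=True)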
+ """ + def __init__( self, quoteChar, escChar=None, escQuote=None, multiline=False, unquoteResults=True, endQuoteChar=None): + """ + Defined with the following parameters: + - quoteChar - string of one or more characters defining the quote delimiting string + - escChar - character to escape quotes, typically backslash (default=None) + - escQuote - special quote sequence to escape an embedded quote string (such as SQL's "" to escape an embedded ") (default=None) + - multiline - boolean indicating whether quotes can span multiple lines (default=False) + - unquoteResults - boolean indicating whether the matched text should be unquoted (default=True) + - endQuoteChar - string of one or more characters defining the end of the quote delimited string (default=None => same as quoteChar) + """ + super(QuotedString,self).__init__() + + # remove white space from quote chars - wont work anyway + quoteChar = quoteChar.strip() + if len(quoteChar) == 0: + warnings.warn("quoteChar cannot be the empty string",SyntaxWarning,stacklevel=2) + raise SyntaxError() + + if endQuoteChar is None: + endQuoteChar = quoteChar + else: + endQuoteChar = endQuoteChar.strip() + if len(endQuoteChar) == 0: + warnings.warn("endQuoteChar cannot be the empty string",SyntaxWarning,stacklevel=2) + raise SyntaxError() + + self.quoteChar = quoteChar + self.quoteCharLen = len(quoteChar) + self.firstQuoteChar = quoteChar[0] + self.endQuoteChar = endQuoteChar + self.endQuoteCharLen = len(endQuoteChar) + self.escChar = escChar + self.escQuote = escQuote + self.unquoteResults = unquoteResults + + if multiline: + self.flags = re.MULTILINE | re.DOTALL + self.pattern = r'%s(?:[^%s%s]' % \ + ( re.escape(self.quoteChar), + _escapeRegexRangeChars(self.endQuoteChar[0]), + (escChar is not None and _escapeRegexRangeChars(escChar) or '') ) + else: + self.flags = 0 + self.pattern = r'%s(?:[^%s\n\r%s]' % \ + ( re.escape(self.quoteChar), + _escapeRegexRangeChars(self.endQuoteChar[0]), + (escChar is not None and _escapeRegexRangeChars(escChar) or '') ) + if len(self.endQuoteChar) > 1: + self.pattern += ( + '|(?:' + ')|(?:'.join(["%s[^%s]" % (re.escape(self.endQuoteChar[:i]), + _escapeRegexRangeChars(self.endQuoteChar[i])) + for i in range(len(self.endQuoteChar)-1,0,-1)]) + ')' + ) + if escQuote: + self.pattern += (r'|(?:%s)' % re.escape(escQuote)) + if escChar: + self.pattern += (r'|(?:%s.)' % re.escape(escChar)) + charset = ''.join(set(self.quoteChar[0]+self.endQuoteChar[0])).replace('^',r'\^').replace('-',r'\-') + self.escCharReplacePattern = re.escape(self.escChar)+("([%s])" % charset) + self.pattern += (r')*%s' % re.escape(self.endQuoteChar)) + + try: + self.re = re.compile(self.pattern, self.flags) + self.reString = self.pattern + except sre_constants.error: + warnings.warn("invalid pattern (%s) passed to Regex" % self.pattern, + SyntaxWarning, stacklevel=2) + raise + + self.name = _ustr(self) + self.errmsg = "Expected " + self.name + #self.myException.msg = self.errmsg + self.mayIndexError = False + self.mayReturnEmpty = True + + def parseImpl( self, instring, loc, doActions=True ): + result = instring[loc] == self.firstQuoteChar and self.re.match(instring,loc) or None + if not result: + exc = self.myException + exc.loc = loc + exc.pstr = instring + raise exc + + loc = result.end() + ret = result.group() + + if self.unquoteResults: + + # strip off quotes + ret = ret[self.quoteCharLen:-self.endQuoteCharLen] + + if isinstance(ret,basestring): + # replace escaped characters + if self.escChar: + ret = re.sub(self.escCharReplacePattern,"\g<1>",ret) 
+ + # replace escaped quotes + if self.escQuote: + ret = ret.replace(self.escQuote, self.endQuoteChar) + + return loc, ret + + def __str__( self ): + try: + return super(QuotedString,self).__str__() + except: + pass + + if self.strRepr is None: + self.strRepr = "quoted string, starting with %s ending with %s" % (self.quoteChar, self.endQuoteChar) + + return self.strRepr + + +class CharsNotIn(Token): + """Token for matching words composed of characters *not* in a given set. + Defined with string containing all disallowed characters, and an optional + minimum, maximum, and/or exact length. The default value for C{min} is 1 (a + minimum value < 1 is not valid); the default values for C{max} and C{exact} + are 0, meaning no maximum or exact length restriction. + """ + def __init__( self, notChars, min=1, max=0, exact=0 ): + super(CharsNotIn,self).__init__() + self.skipWhitespace = False + self.notChars = notChars + + if min < 1: + raise ValueError("cannot specify a minimum length < 1; use Optional(CharsNotIn()) if zero-length char group is permitted") + + self.minLen = min + + if max > 0: + self.maxLen = max + else: + self.maxLen = _MAX_INT + + if exact > 0: + self.maxLen = exact + self.minLen = exact + + self.name = _ustr(self) + self.errmsg = "Expected " + self.name + self.mayReturnEmpty = ( self.minLen == 0 ) + #self.myException.msg = self.errmsg + self.mayIndexError = False + + def parseImpl( self, instring, loc, doActions=True ): + if instring[loc] in self.notChars: + #~ raise ParseException( instring, loc, self.errmsg ) + exc = self.myException + exc.loc = loc + exc.pstr = instring + raise exc + + start = loc + loc += 1 + notchars = self.notChars + maxlen = min( start+self.maxLen, len(instring) ) + while loc < maxlen and \ + (instring[loc] not in notchars): + loc += 1 + + if loc - start < self.minLen: + #~ raise ParseException( instring, loc, self.errmsg ) + exc = self.myException + exc.loc = loc + exc.pstr = instring + raise exc + + return loc, instring[start:loc] + + def __str__( self ): + try: + return super(CharsNotIn, self).__str__() + except: + pass + + if self.strRepr is None: + if len(self.notChars) > 4: + self.strRepr = "!W:(%s...)" % self.notChars[:4] + else: + self.strRepr = "!W:(%s)" % self.notChars + + return self.strRepr + +class White(Token): + """Special matching class for matching whitespace. Normally, whitespace is ignored + by pyparsing grammars. This class is included when some whitespace structures + are significant. Define with a string containing the whitespace characters to be + matched; default is C{" \\t\\r\\n"}. 
Also takes optional C{min}, C{max}, and C{exact} arguments, + as defined for the C{Word} class.""" + whiteStrs = { + " " : "", + "\t": "", + "\n": "", + "\r": "", + "\f": "", + } + def __init__(self, ws=" \t\r\n", min=1, max=0, exact=0): + super(White,self).__init__() + self.matchWhite = ws + self.setWhitespaceChars( "".join([c for c in self.whiteChars if c not in self.matchWhite]) ) + #~ self.leaveWhitespace() + self.name = ("".join([White.whiteStrs[c] for c in self.matchWhite])) + self.mayReturnEmpty = True + self.errmsg = "Expected " + self.name + #self.myException.msg = self.errmsg + + self.minLen = min + + if max > 0: + self.maxLen = max + else: + self.maxLen = _MAX_INT + + if exact > 0: + self.maxLen = exact + self.minLen = exact + + def parseImpl( self, instring, loc, doActions=True ): + if not(instring[ loc ] in self.matchWhite): + #~ raise ParseException( instring, loc, self.errmsg ) + exc = self.myException + exc.loc = loc + exc.pstr = instring + raise exc + start = loc + loc += 1 + maxloc = start + self.maxLen + maxloc = min( maxloc, len(instring) ) + while loc < maxloc and instring[loc] in self.matchWhite: + loc += 1 + + if loc - start < self.minLen: + #~ raise ParseException( instring, loc, self.errmsg ) + exc = self.myException + exc.loc = loc + exc.pstr = instring + raise exc + + return loc, instring[start:loc] + + +class _PositionToken(Token): + def __init__( self ): + super(_PositionToken,self).__init__() + self.name=self.__class__.__name__ + self.mayReturnEmpty = True + self.mayIndexError = False + +class GoToColumn(_PositionToken): + """Token to advance to a specific column of input text; useful for tabular report scraping.""" + def __init__( self, colno ): + super(GoToColumn,self).__init__() + self.col = colno + + def preParse( self, instring, loc ): + if col(loc,instring) != self.col: + instrlen = len(instring) + if self.ignoreExprs: + loc = self._skipIgnorables( instring, loc ) + while loc < instrlen and instring[loc].isspace() and col( loc, instring ) != self.col : + loc += 1 + return loc + + def parseImpl( self, instring, loc, doActions=True ): + thiscol = col( loc, instring ) + if thiscol > self.col: + raise ParseException( instring, loc, "Text not in expected column", self ) + newloc = loc + self.col - thiscol + ret = instring[ loc: newloc ] + return newloc, ret + +class LineStart(_PositionToken): + """Matches if current position is at the beginning of a line within the parse string""" + def __init__( self ): + super(LineStart,self).__init__() + self.setWhitespaceChars( ParserElement.DEFAULT_WHITE_CHARS.replace("\n","") ) + self.errmsg = "Expected start of line" + #self.myException.msg = self.errmsg + + def preParse( self, instring, loc ): + preloc = super(LineStart,self).preParse(instring,loc) + if instring[preloc] == "\n": + loc += 1 + return loc + + def parseImpl( self, instring, loc, doActions=True ): + if not( loc==0 or + (loc == self.preParse( instring, 0 )) or + (instring[loc-1] == "\n") ): #col(loc, instring) != 1: + #~ raise ParseException( instring, loc, "Expected start of line" ) + exc = self.myException + exc.loc = loc + exc.pstr = instring + raise exc + return loc, [] + +class LineEnd(_PositionToken): + """Matches if current position is at the end of a line within the parse string""" + def __init__( self ): + super(LineEnd,self).__init__() + self.setWhitespaceChars( ParserElement.DEFAULT_WHITE_CHARS.replace("\n","") ) + self.errmsg = "Expected end of line" + #self.myException.msg = self.errmsg + + def parseImpl( self, instring, loc, doActions=True ): 
+ if loc len(instring): + return loc, [] + else: + exc = self.myException + exc.loc = loc + exc.pstr = instring + raise exc + +class WordStart(_PositionToken): + """Matches if the current position is at the beginning of a Word, and + is not preceded by any character in a given set of C{wordChars} + (default=C{printables}). To emulate the C{\b} behavior of regular expressions, + use C{WordStart(alphanums)}. C{WordStart} will also match at the beginning of + the string being parsed, or at the beginning of a line. + """ + def __init__(self, wordChars = printables): + super(WordStart,self).__init__() + self.wordChars = set(wordChars) + self.errmsg = "Not at the start of a word" + + def parseImpl(self, instring, loc, doActions=True ): + if loc != 0: + if (instring[loc-1] in self.wordChars or + instring[loc] not in self.wordChars): + exc = self.myException + exc.loc = loc + exc.pstr = instring + raise exc + return loc, [] + +class WordEnd(_PositionToken): + """Matches if the current position is at the end of a Word, and + is not followed by any character in a given set of C{wordChars} + (default=C{printables}). To emulate the C{\b} behavior of regular expressions, + use C{WordEnd(alphanums)}. C{WordEnd} will also match at the end of + the string being parsed, or at the end of a line. + """ + def __init__(self, wordChars = printables): + super(WordEnd,self).__init__() + self.wordChars = set(wordChars) + self.skipWhitespace = False + self.errmsg = "Not at the end of a word" + + def parseImpl(self, instring, loc, doActions=True ): + instrlen = len(instring) + if instrlen>0 and loc maxExcLoc: + maxException = err + maxExcLoc = err.loc + except IndexError: + if len(instring) > maxExcLoc: + maxException = ParseException(instring,len(instring),e.errmsg,self) + maxExcLoc = len(instring) + else: + if loc2 > maxMatchLoc: + maxMatchLoc = loc2 + maxMatchExp = e + + if maxMatchLoc < 0: + if maxException is not None: + raise maxException + else: + raise ParseException(instring, loc, "no defined alternatives to match", self) + + return maxMatchExp._parse( instring, loc, doActions ) + + def __ixor__(self, other ): + if isinstance( other, basestring ): + other = Literal( other ) + return self.append( other ) #Or( [ self, other ] ) + + def __str__( self ): + if hasattr(self,"name"): + return self.name + + if self.strRepr is None: + self.strRepr = "{" + " ^ ".join( [ _ustr(e) for e in self.exprs ] ) + "}" + + return self.strRepr + + def checkRecursion( self, parseElementList ): + subRecCheckList = parseElementList[:] + [ self ] + for e in self.exprs: + e.checkRecursion( subRecCheckList ) + + +class MatchFirst(ParseExpression): + """Requires that at least one C{ParseExpression} is found. + If two expressions match, the first one listed is the one that will match. + May be constructed using the C{'|'} operator. 
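
    A brief illustrative sketch (the literals below are invented for this note, and
    the import assumes this bundled module is importable as C{pyparsing})::
        from pyparsing import Literal
        expr = Literal("ab") | Literal("abc")   # equivalent to MatchFirst([Literal("ab"), Literal("abc")])
        expr.parseString("abc")                 # -> ['ab']  (first listed alternative wins,
                                                #     unlike Or / '^', which prefers the longest match)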
+ """ + def __init__( self, exprs, savelist = False ): + super(MatchFirst,self).__init__(exprs, savelist) + if exprs: + self.mayReturnEmpty = False + for e in self.exprs: + if e.mayReturnEmpty: + self.mayReturnEmpty = True + break + else: + self.mayReturnEmpty = True + + def parseImpl( self, instring, loc, doActions=True ): + maxExcLoc = -1 + maxException = None + for e in self.exprs: + try: + ret = e._parse( instring, loc, doActions ) + return ret + except ParseException, err: + if err.loc > maxExcLoc: + maxException = err + maxExcLoc = err.loc + except IndexError: + if len(instring) > maxExcLoc: + maxException = ParseException(instring,len(instring),e.errmsg,self) + maxExcLoc = len(instring) + + # only got here if no expression matched, raise exception for match that made it the furthest + else: + if maxException is not None: + raise maxException + else: + raise ParseException(instring, loc, "no defined alternatives to match", self) + + def __ior__(self, other ): + if isinstance( other, basestring ): + other = Literal( other ) + return self.append( other ) #MatchFirst( [ self, other ] ) + + def __str__( self ): + if hasattr(self,"name"): + return self.name + + if self.strRepr is None: + self.strRepr = "{" + " | ".join( [ _ustr(e) for e in self.exprs ] ) + "}" + + return self.strRepr + + def checkRecursion( self, parseElementList ): + subRecCheckList = parseElementList[:] + [ self ] + for e in self.exprs: + e.checkRecursion( subRecCheckList ) + + +class Each(ParseExpression): + """Requires all given C{ParseExpression}s to be found, but in any order. + Expressions may be separated by whitespace. + May be constructed using the C{'&'} operator. + """ + def __init__( self, exprs, savelist = True ): + super(Each,self).__init__(exprs, savelist) + self.mayReturnEmpty = True + for e in self.exprs: + if not e.mayReturnEmpty: + self.mayReturnEmpty = False + break + self.skipWhitespace = True + self.initExprGroups = True + + def parseImpl( self, instring, loc, doActions=True ): + if self.initExprGroups: + opt1 = [ e.expr for e in self.exprs if isinstance(e,Optional) ] + opt2 = [ e for e in self.exprs if e.mayReturnEmpty and e not in opt1 ] + self.optionals = opt1 + opt2 + self.multioptionals = [ e.expr for e in self.exprs if isinstance(e,ZeroOrMore) ] + self.multirequired = [ e.expr for e in self.exprs if isinstance(e,OneOrMore) ] + self.required = [ e for e in self.exprs if not isinstance(e,(Optional,ZeroOrMore,OneOrMore)) ] + self.required += self.multirequired + self.initExprGroups = False + tmpLoc = loc + tmpReqd = self.required[:] + tmpOpt = self.optionals[:] + matchOrder = [] + + keepMatching = True + while keepMatching: + tmpExprs = tmpReqd + tmpOpt + self.multioptionals + self.multirequired + failed = [] + for e in tmpExprs: + try: + tmpLoc = e.tryParse( instring, tmpLoc ) + except ParseException: + failed.append(e) + else: + matchOrder.append(e) + if e in tmpReqd: + tmpReqd.remove(e) + elif e in tmpOpt: + tmpOpt.remove(e) + if len(failed) == len(tmpExprs): + keepMatching = False + + if tmpReqd: + missing = ", ".join( [ _ustr(e) for e in tmpReqd ] ) + raise ParseException(instring,loc,"Missing one or more required elements (%s)" % missing ) + + # add any unmatched Optionals, in case they have default values defined + matchOrder += [e for e in self.exprs if isinstance(e,Optional) and e.expr in tmpOpt] + + resultlist = [] + for e in matchOrder: + loc,results = e._parse(instring,loc,doActions) + resultlist.append(results) + + finalResults = ParseResults([]) + for r in resultlist: + dups = {} + 
for k in r.keys(): + if k in finalResults.keys(): + tmp = ParseResults(finalResults[k]) + tmp += ParseResults(r[k]) + dups[k] = tmp + finalResults += ParseResults(r) + for k,v in dups.items(): + finalResults[k] = v + return loc, finalResults + + def __str__( self ): + if hasattr(self,"name"): + return self.name + + if self.strRepr is None: + self.strRepr = "{" + " & ".join( [ _ustr(e) for e in self.exprs ] ) + "}" + + return self.strRepr + + def checkRecursion( self, parseElementList ): + subRecCheckList = parseElementList[:] + [ self ] + for e in self.exprs: + e.checkRecursion( subRecCheckList ) + + +class ParseElementEnhance(ParserElement): + """Abstract subclass of C{ParserElement}, for combining and post-processing parsed tokens.""" + def __init__( self, expr, savelist=False ): + super(ParseElementEnhance,self).__init__(savelist) + if isinstance( expr, basestring ): + expr = Literal(expr) + self.expr = expr + self.strRepr = None + if expr is not None: + self.mayIndexError = expr.mayIndexError + self.mayReturnEmpty = expr.mayReturnEmpty + self.setWhitespaceChars( expr.whiteChars ) + self.skipWhitespace = expr.skipWhitespace + self.saveAsList = expr.saveAsList + self.callPreparse = expr.callPreparse + self.ignoreExprs.extend(expr.ignoreExprs) + + def parseImpl( self, instring, loc, doActions=True ): + if self.expr is not None: + return self.expr._parse( instring, loc, doActions, callPreParse=False ) + else: + raise ParseException("",loc,self.errmsg,self) + + def leaveWhitespace( self ): + self.skipWhitespace = False + self.expr = self.expr.copy() + if self.expr is not None: + self.expr.leaveWhitespace() + return self + + def ignore( self, other ): + if isinstance( other, Suppress ): + if other not in self.ignoreExprs: + super( ParseElementEnhance, self).ignore( other ) + if self.expr is not None: + self.expr.ignore( self.ignoreExprs[-1] ) + else: + super( ParseElementEnhance, self).ignore( other ) + if self.expr is not None: + self.expr.ignore( self.ignoreExprs[-1] ) + return self + + def streamline( self ): + super(ParseElementEnhance,self).streamline() + if self.expr is not None: + self.expr.streamline() + return self + + def checkRecursion( self, parseElementList ): + if self in parseElementList: + raise RecursiveGrammarException( parseElementList+[self] ) + subRecCheckList = parseElementList[:] + [ self ] + if self.expr is not None: + self.expr.checkRecursion( subRecCheckList ) + + def validate( self, validateTrace=[] ): + tmp = validateTrace[:]+[self] + if self.expr is not None: + self.expr.validate(tmp) + self.checkRecursion( [] ) + + def __str__( self ): + try: + return super(ParseElementEnhance,self).__str__() + except: + pass + + if self.strRepr is None and self.expr is not None: + self.strRepr = "%s:(%s)" % ( self.__class__.__name__, _ustr(self.expr) ) + return self.strRepr + + +class FollowedBy(ParseElementEnhance): + """Lookahead matching of the given parse expression. C{FollowedBy} + does *not* advance the parsing position within the input string, it only + verifies that the specified parse expression matches at the current + position. C{FollowedBy} always returns a null token list.""" + def __init__( self, expr ): + super(FollowedBy,self).__init__(expr) + self.mayReturnEmpty = True + + def parseImpl( self, instring, loc, doActions=True ): + self.expr.tryParse( instring, loc ) + return loc, [] + + +class NotAny(ParseElementEnhance): + """Lookahead to disallow matching with the given parse expression. 
C{NotAny} + does *not* advance the parsing position within the input string, it only + verifies that the specified parse expression does *not* match at the current + position. Also, C{NotAny} does *not* skip over leading whitespace. C{NotAny} + always returns a null token list. May be constructed using the '~' operator.""" + def __init__( self, expr ): + super(NotAny,self).__init__(expr) + #~ self.leaveWhitespace() + self.skipWhitespace = False # do NOT use self.leaveWhitespace(), don't want to propagate to exprs + self.mayReturnEmpty = True + self.errmsg = "Found unwanted token, "+_ustr(self.expr) + #self.myException = ParseException("",0,self.errmsg,self) + + def parseImpl( self, instring, loc, doActions=True ): + try: + self.expr.tryParse( instring, loc ) + except (ParseException,IndexError): + pass + else: + #~ raise ParseException(instring, loc, self.errmsg ) + exc = self.myException + exc.loc = loc + exc.pstr = instring + raise exc + return loc, [] + + def __str__( self ): + if hasattr(self,"name"): + return self.name + + if self.strRepr is None: + self.strRepr = "~{" + _ustr(self.expr) + "}" + + return self.strRepr + + +class ZeroOrMore(ParseElementEnhance): + """Optional repetition of zero or more of the given expression.""" + def __init__( self, expr ): + super(ZeroOrMore,self).__init__(expr) + self.mayReturnEmpty = True + + def parseImpl( self, instring, loc, doActions=True ): + tokens = [] + try: + loc, tokens = self.expr._parse( instring, loc, doActions, callPreParse=False ) + hasIgnoreExprs = ( len(self.ignoreExprs) > 0 ) + while 1: + if hasIgnoreExprs: + preloc = self._skipIgnorables( instring, loc ) + else: + preloc = loc + loc, tmptokens = self.expr._parse( instring, preloc, doActions ) + if tmptokens or tmptokens.keys(): + tokens += tmptokens + except (ParseException,IndexError): + pass + + return loc, tokens + + def __str__( self ): + if hasattr(self,"name"): + return self.name + + if self.strRepr is None: + self.strRepr = "[" + _ustr(self.expr) + "]..." + + return self.strRepr + + def setResultsName( self, name, listAllMatches=False ): + ret = super(ZeroOrMore,self).setResultsName(name,listAllMatches) + ret.saveAsList = True + return ret + + +class OneOrMore(ParseElementEnhance): + """Repetition of one or more of the given expression.""" + def parseImpl( self, instring, loc, doActions=True ): + # must be at least one + loc, tokens = self.expr._parse( instring, loc, doActions, callPreParse=False ) + try: + hasIgnoreExprs = ( len(self.ignoreExprs) > 0 ) + while 1: + if hasIgnoreExprs: + preloc = self._skipIgnorables( instring, loc ) + else: + preloc = loc + loc, tmptokens = self.expr._parse( instring, preloc, doActions ) + if tmptokens or tmptokens.keys(): + tokens += tmptokens + except (ParseException,IndexError): + pass + + return loc, tokens + + def __str__( self ): + if hasattr(self,"name"): + return self.name + + if self.strRepr is None: + self.strRepr = "{" + _ustr(self.expr) + "}..." + + return self.strRepr + + def setResultsName( self, name, listAllMatches=False ): + ret = super(OneOrMore,self).setResultsName(name,listAllMatches) + ret.saveAsList = True + return ret + +class _NullToken(object): + def __bool__(self): + return False + __nonzero__ = __bool__ + def __str__(self): + return "" + +_optionalNotMatched = _NullToken() +class Optional(ParseElementEnhance): + """Optional matching of the given expression. + A default return string can also be specified, if the optional expression + is not found. 
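
    For example, an illustrative sketch only (the ZIP-code grammar and inputs are
    invented here, and the import assumes this bundled module is importable as
    C{pyparsing})::
        from pyparsing import Optional, Word, nums
        zipcode = Word(nums, exact=5) + Optional("-" + Word(nums, exact=4), default="")
        zipcode.parseString("02139-4307")   # -> ['02139', '-', '4307']
        zipcode.parseString("02139")        # -> ['02139', '']  (default value supplied)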
+ """ + def __init__( self, exprs, default=_optionalNotMatched ): + super(Optional,self).__init__( exprs, savelist=False ) + self.defaultValue = default + self.mayReturnEmpty = True + + def parseImpl( self, instring, loc, doActions=True ): + try: + loc, tokens = self.expr._parse( instring, loc, doActions, callPreParse=False ) + except (ParseException,IndexError): + if self.defaultValue is not _optionalNotMatched: + if self.expr.resultsName: + tokens = ParseResults([ self.defaultValue ]) + tokens[self.expr.resultsName] = self.defaultValue + else: + tokens = [ self.defaultValue ] + else: + tokens = [] + return loc, tokens + + def __str__( self ): + if hasattr(self,"name"): + return self.name + + if self.strRepr is None: + self.strRepr = "[" + _ustr(self.expr) + "]" + + return self.strRepr + + +class SkipTo(ParseElementEnhance): + """Token for skipping over all undefined text until the matched expression is found. + If C{include} is set to true, the matched expression is also parsed (the skipped text + and matched expression are returned as a 2-element list). The C{ignore} + argument is used to define grammars (typically quoted strings and comments) that + might contain false matches. + """ + def __init__( self, other, include=False, ignore=None, failOn=None ): + super( SkipTo, self ).__init__( other ) + self.ignoreExpr = ignore + self.mayReturnEmpty = True + self.mayIndexError = False + self.includeMatch = include + self.asList = False + if failOn is not None and isinstance(failOn, basestring): + self.failOn = Literal(failOn) + else: + self.failOn = failOn + self.errmsg = "No match found for "+_ustr(self.expr) + #self.myException = ParseException("",0,self.errmsg,self) + + def parseImpl( self, instring, loc, doActions=True ): + startLoc = loc + instrlen = len(instring) + expr = self.expr + failParse = False + while loc <= instrlen: + try: + if self.failOn: + try: + self.failOn.tryParse(instring, loc) + except ParseBaseException: + pass + else: + failParse = True + raise ParseException(instring, loc, "Found expression " + str(self.failOn)) + failParse = False + if self.ignoreExpr is not None: + while 1: + try: + loc = self.ignoreExpr.tryParse(instring,loc) + # print "found ignoreExpr, advance to", loc + except ParseBaseException: + break + expr._parse( instring, loc, doActions=False, callPreParse=False ) + skipText = instring[startLoc:loc] + if self.includeMatch: + loc,mat = expr._parse(instring,loc,doActions,callPreParse=False) + if mat: + skipRes = ParseResults( skipText ) + skipRes += mat + return loc, [ skipRes ] + else: + return loc, [ skipText ] + else: + return loc, [ skipText ] + except (ParseException,IndexError): + if failParse: + raise + else: + loc += 1 + exc = self.myException + exc.loc = loc + exc.pstr = instring + raise exc + +class Forward(ParseElementEnhance): + """Forward declaration of an expression to be defined later - + used for recursive grammars, such as algebraic infix notation. + When the expression is known, it is assigned to the C{Forward} variable using the '<<' operator. + + Note: take care when assigning to C{Forward} not to overlook precedence of operators. + Specifically, '|' has a lower precedence than '<<', so that:: + fwdExpr << a | b | c + will actually be evaluated as:: + (fwdExpr << a) | b | c + thereby leaving b and c out as parseable alternatives. 
It is recommended that you + explicitly group the values inserted into the C{Forward}:: + fwdExpr << (a | b | c) + """ + def __init__( self, other=None ): + super(Forward,self).__init__( other, savelist=False ) + + def __lshift__( self, other ): + if isinstance( other, basestring ): + other = Literal(other) + self.expr = other + self.mayReturnEmpty = other.mayReturnEmpty + self.strRepr = None + self.mayIndexError = self.expr.mayIndexError + self.mayReturnEmpty = self.expr.mayReturnEmpty + self.setWhitespaceChars( self.expr.whiteChars ) + self.skipWhitespace = self.expr.skipWhitespace + self.saveAsList = self.expr.saveAsList + self.ignoreExprs.extend(self.expr.ignoreExprs) + return None + + def leaveWhitespace( self ): + self.skipWhitespace = False + return self + + def streamline( self ): + if not self.streamlined: + self.streamlined = True + if self.expr is not None: + self.expr.streamline() + return self + + def validate( self, validateTrace=[] ): + if self not in validateTrace: + tmp = validateTrace[:]+[self] + if self.expr is not None: + self.expr.validate(tmp) + self.checkRecursion([]) + + def __str__( self ): + if hasattr(self,"name"): + return self.name + + self._revertClass = self.__class__ + self.__class__ = _ForwardNoRecurse + try: + if self.expr is not None: + retString = _ustr(self.expr) + else: + retString = "None" + finally: + self.__class__ = self._revertClass + return self.__class__.__name__ + ": " + retString + + def copy(self): + if self.expr is not None: + return super(Forward,self).copy() + else: + ret = Forward() + ret << self + return ret + +class _ForwardNoRecurse(Forward): + def __str__( self ): + return "..." + +class TokenConverter(ParseElementEnhance): + """Abstract subclass of C{ParseExpression}, for converting parsed results.""" + def __init__( self, expr, savelist=False ): + super(TokenConverter,self).__init__( expr )#, savelist ) + self.saveAsList = False + +class Upcase(TokenConverter): + """Converter to upper case all matching tokens.""" + def __init__(self, *args): + super(Upcase,self).__init__(*args) + warnings.warn("Upcase class is deprecated, use upcaseTokens parse action instead", + DeprecationWarning,stacklevel=2) + + def postParse( self, instring, loc, tokenlist ): + return list(map( string.upper, tokenlist )) + + +class Combine(TokenConverter): + """Converter to concatenate all matching tokens to a single string. + By default, the matching patterns must also be contiguous in the input string; + this can be disabled by specifying C{'adjacent=False'} in the constructor. 
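
    A small illustrative sketch (the C{real} expression and inputs are invented for
    this note, and the import assumes this bundled module is importable as C{pyparsing})::
        from pyparsing import Combine, Word, nums
        real = Combine(Word(nums) + "." + Word(nums))
        real.parseString("3.1416")     # -> ['3.1416']  (a single concatenated token)
        real.parseString("3 . 1416")   # raises ParseException, the pieces are not contiguous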
+ """ + def __init__( self, expr, joinString="", adjacent=True ): + super(Combine,self).__init__( expr ) + # suppress whitespace-stripping in contained parse expressions, but re-enable it on the Combine itself + if adjacent: + self.leaveWhitespace() + self.adjacent = adjacent + self.skipWhitespace = True + self.joinString = joinString + self.callPreparse = True + + def ignore( self, other ): + if self.adjacent: + ParserElement.ignore(self, other) + else: + super( Combine, self).ignore( other ) + return self + + def postParse( self, instring, loc, tokenlist ): + retToks = tokenlist.copy() + del retToks[:] + retToks += ParseResults([ "".join(tokenlist._asStringList(self.joinString)) ], modal=self.modalResults) + + if self.resultsName and len(retToks.keys())>0: + return [ retToks ] + else: + return retToks + +class Group(TokenConverter): + """Converter to return the matched tokens as a list - useful for returning tokens of C{ZeroOrMore} and C{OneOrMore} expressions.""" + def __init__( self, expr ): + super(Group,self).__init__( expr ) + self.saveAsList = True + + def postParse( self, instring, loc, tokenlist ): + return [ tokenlist ] + +class Dict(TokenConverter): + """Converter to return a repetitive expression as a list, but also as a dictionary. + Each element can also be referenced using the first token in the expression as its key. + Useful for tabular report scraping when the first column can be used as a item key. + """ + def __init__( self, exprs ): + super(Dict,self).__init__( exprs ) + self.saveAsList = True + + def postParse( self, instring, loc, tokenlist ): + for i,tok in enumerate(tokenlist): + if len(tok) == 0: + continue + ikey = tok[0] + if isinstance(ikey,int): + ikey = _ustr(tok[0]).strip() + if len(tok)==1: + tokenlist[ikey] = _ParseResultsWithOffset("",i) + elif len(tok)==2 and not isinstance(tok[1],ParseResults): + tokenlist[ikey] = _ParseResultsWithOffset(tok[1],i) + else: + dictvalue = tok.copy() #ParseResults(i) + del dictvalue[0] + if len(dictvalue)!= 1 or (isinstance(dictvalue,ParseResults) and dictvalue.keys()): + tokenlist[ikey] = _ParseResultsWithOffset(dictvalue,i) + else: + tokenlist[ikey] = _ParseResultsWithOffset(dictvalue[0],i) + + if self.resultsName: + return [ tokenlist ] + else: + return tokenlist + + +class Suppress(TokenConverter): + """Converter for ignoring the results of a parsed expression.""" + def postParse( self, instring, loc, tokenlist ): + return [] + + def suppress( self ): + return self + + +class OnlyOnce(object): + """Wrapper for parse actions, to ensure they are only called once.""" + def __init__(self, methodCall): + self.callable = _trim_arity(methodCall) + self.called = False + def __call__(self,s,l,t): + if not self.called: + results = self.callable(s,l,t) + self.called = True + return results + raise ParseException(s,l,"") + def reset(self): + self.called = False + +def traceParseAction(f): + """Decorator for debugging parse actions.""" + f = _trim_arity(f) + def z(*paArgs): + thisFunc = f.func_name + s,l,t = paArgs[-3:] + if len(paArgs)>3: + thisFunc = paArgs[0].__class__.__name__ + '.' 
+ thisFunc + sys.stderr.write( ">>entering %s(line: '%s', %d, %s)\n" % (thisFunc,line(l,s),l,t) ) + try: + ret = f(*paArgs) + except Exception: + exc = sys.exc_info()[1] + sys.stderr.write( "<", "|".join( [ _escapeRegexChars(sym) for sym in symbols] )) + try: + if len(symbols)==len("".join(symbols)): + return Regex( "[%s]" % "".join( [ _escapeRegexRangeChars(sym) for sym in symbols] ) ) + else: + return Regex( "|".join( [ re.escape(sym) for sym in symbols] ) ) + except: + warnings.warn("Exception creating Regex for oneOf, building MatchFirst", + SyntaxWarning, stacklevel=2) + + + # last resort, just use MatchFirst + return MatchFirst( [ parseElementClass(sym) for sym in symbols ] ) + +def dictOf( key, value ): + """Helper to easily and clearly define a dictionary by specifying the respective patterns + for the key and value. Takes care of defining the C{Dict}, C{ZeroOrMore}, and C{Group} tokens + in the proper order. The key pattern can include delimiting markers or punctuation, + as long as they are suppressed, thereby leaving the significant key text. The value + pattern can include named results, so that the C{Dict} results can include named token + fields. + """ + return Dict( ZeroOrMore( Group ( key + value ) ) ) + +def originalTextFor(expr, asString=True): + """Helper to return the original, untokenized text for a given expression. Useful to + restore the parsed fields of an HTML start tag into the raw tag text itself, or to + revert separate tokens with intervening whitespace back to the original matching + input text. Simpler to use than the parse action C{L{keepOriginalText}}, and does not + require the inspect module to chase up the call stack. By default, returns a + string containing the original parsed text. + + If the optional C{asString} argument is passed as C{False}, then the return value is a + C{ParseResults} containing any results names that were originally matched, and a + single token containing the original matched text from the input string. 
So if + the expression passed to C{L{originalTextFor}} contains expressions with defined + results names, you must set C{asString} to C{False} if you want to preserve those + results name values.""" + locMarker = Empty().setParseAction(lambda s,loc,t: loc) + endlocMarker = locMarker.copy() + endlocMarker.callPreparse = False + matchExpr = locMarker("_original_start") + expr + endlocMarker("_original_end") + if asString: + extractText = lambda s,l,t: s[t._original_start:t._original_end] + else: + def extractText(s,l,t): + del t[:] + t.insert(0, s[t._original_start:t._original_end]) + del t["_original_start"] + del t["_original_end"] + matchExpr.setParseAction(extractText) + return matchExpr + +# convenience constants for positional expressions +empty = Empty().setName("empty") +lineStart = LineStart().setName("lineStart") +lineEnd = LineEnd().setName("lineEnd") +stringStart = StringStart().setName("stringStart") +stringEnd = StringEnd().setName("stringEnd") + +_escapedPunc = Word( _bslash, r"\[]-*.$+^?()~ ", exact=2 ).setParseAction(lambda s,l,t:t[0][1]) +_printables_less_backslash = "".join([ c for c in printables if c not in r"\]" ]) +_escapedHexChar = Regex(r"\\0?[xX][0-9a-fA-F]+").setParseAction(lambda s,l,t:unichr(int(t[0][1:],16))) +_escapedOctChar = Regex(r"\\0[0-7]+").setParseAction(lambda s,l,t:unichr(int(t[0][1:],8))) +_singleChar = _escapedPunc | _escapedHexChar | _escapedOctChar | Word(_printables_less_backslash,exact=1) +_charRange = Group(_singleChar + Suppress("-") + _singleChar) +_reBracketExpr = Literal("[") + Optional("^").setResultsName("negate") + Group( OneOrMore( _charRange | _singleChar ) ).setResultsName("body") + "]" + +_expanded = lambda p: (isinstance(p,ParseResults) and ''.join([ unichr(c) for c in range(ord(p[0]),ord(p[1])+1) ]) or p) + +def srange(s): + r"""Helper to easily define string ranges for use in Word construction. Borrows + syntax from regexp '[]' string range definitions:: + srange("[0-9]") -> "0123456789" + srange("[a-z]") -> "abcdefghijklmnopqrstuvwxyz" + srange("[a-z$_]") -> "abcdefghijklmnopqrstuvwxyz$_" + The input string must be enclosed in []'s, and the returned string is the expanded + character set joined into a single string. + The values enclosed in the []'s may be:: + a single character + an escaped character with a leading backslash (such as \- or \]) + an escaped hex character with a leading '\x' (\x21, which is a '!' character) + (\0x## is also supported for backwards compatibility) + an escaped octal character with a leading '\0' (\041, which is a '!' character) + a range of any of the above, separated by a dash ('a-z', etc.) + any combination of the above ('aeiouy', 'a-zA-Z0-9_$', etc.) + """ + try: + return "".join([_expanded(part) for part in _reBracketExpr.parseString(s).body]) + except: + return "" + +def matchOnlyAtCol(n): + """Helper method for defining parse actions that require matching at a specific + column in the input text. + """ + def verifyCol(strg,locn,toks): + if col(locn,strg) != n: + raise ParseException(strg,locn,"matched token not at column %d" % n) + return verifyCol + +def replaceWith(replStr): + """Helper method for common parse actions that simply return a literal value. Especially + useful when used with C{transformString()}. + """ + def _replFunc(*args): + return [replStr] + return _replFunc + +def removeQuotes(s,l,t): + """Helper parse action for removing quotation marks from parsed quoted strings. 
+       To use, add this parse action to quoted string using::
+         quotedString.setParseAction( removeQuotes )
+    """
+    return t[0][1:-1]
+
+def upcaseTokens(s,l,t):
+    """Helper parse action to convert tokens to upper case."""
+    return [ tt.upper() for tt in map(_ustr,t) ]
+
+def downcaseTokens(s,l,t):
+    """Helper parse action to convert tokens to lower case."""
+    return [ tt.lower() for tt in map(_ustr,t) ]
+
+def keepOriginalText(s,startLoc,t):
+    """DEPRECATED - use new helper method C{originalTextFor}.
+       Helper parse action to preserve original parsed text,
+       overriding any nested parse actions."""
+    try:
+        endloc = getTokensEndLoc()
+    except ParseException:
+        raise ParseFatalException("incorrect usage of keepOriginalText - may only be called as a parse action")
+    del t[:]
+    t += ParseResults(s[startLoc:endloc])
+    return t
+
+def getTokensEndLoc():
+    """Method to be called from within a parse action to determine the end
+       location of the parsed tokens."""
+    import inspect
+    fstack = inspect.stack()
+    try:
+        # search up the stack (through intervening argument normalizers) for correct calling routine
+        for f in fstack[2:]:
+            if f[3] == "_parseNoCache":
+                endloc = f[0].f_locals["loc"]
+                return endloc
+        else:
+            raise ParseFatalException("incorrect usage of getTokensEndLoc - may only be called from within a parse action")
+    finally:
+        del fstack
+
+def _makeTags(tagStr, xml):
+    """Internal helper to construct opening and closing tag expressions, given a tag name"""
+    if isinstance(tagStr,basestring):
+        resname = tagStr
+        tagStr = Keyword(tagStr, caseless=not xml)
+    else:
+        resname = tagStr.name
+
+    tagAttrName = Word(alphas,alphanums+"_-:")
+    if (xml):
+        tagAttrValue = dblQuotedString.copy().setParseAction( removeQuotes )
+        openTag = Suppress("<") + tagStr("tag") + \
+                Dict(ZeroOrMore(Group( tagAttrName + Suppress("=") + tagAttrValue ))) + \
+                Optional("/",default=[False]).setResultsName("empty").setParseAction(lambda s,l,t:t[0]=='/') + Suppress(">")
+    else:
+        printablesLessRAbrack = "".join( [ c for c in printables if c not in ">" ] )
+        tagAttrValue = quotedString.copy().setParseAction( removeQuotes ) | Word(printablesLessRAbrack)
+        openTag = Suppress("<") + tagStr("tag") + \
+                Dict(ZeroOrMore(Group( tagAttrName.setParseAction(downcaseTokens) + \
+                Optional( Suppress("=") + tagAttrValue ) ))) + \
+                Optional("/",default=[False]).setResultsName("empty").setParseAction(lambda s,l,t:t[0]=='/') + Suppress(">")
+    closeTag = Combine(_L("</") + tagStr + ">")
+
+    openTag = openTag.setResultsName("start"+"".join(resname.replace(":"," ").title().split())).setName("<%s>" % tagStr)
+    closeTag = closeTag.setResultsName("end"+"".join(resname.replace(":"," ").title().split())).setName("</%s>" % tagStr)
+    openTag.tag = resname
+    closeTag.tag = resname
+    return openTag, closeTag
+
+def makeHTMLTags(tagStr):
+    """Helper to construct opening and closing tag expressions for HTML, given a tag name"""
+    return _makeTags( tagStr, False )
+
+def makeXMLTags(tagStr):
+    """Helper to construct opening and closing tag expressions for XML, given a tag name"""
+    return _makeTags( tagStr, True )
+
+def withAttribute(*args,**attrDict):
+    """Helper to create a validating parse action to be used with start tags created
+       with C{makeXMLTags} or C{makeHTMLTags}. Use C{withAttribute} to qualify a starting tag
+       with a required attribute value, to avoid false matches on common tags such as
+       C{<TD>} or C{<DIV>
}. + + Call C{withAttribute} with a series of attribute names and values. Specify the list + of filter attributes names and values as: + - keyword arguments, as in C{(align="right")}, or + - as an explicit dict with C{**} operator, when an attribute name is also a Python + reserved word, as in C{**{"class":"Customer", "align":"right"}} + - a list of name-value tuples, as in ( ("ns1:class", "Customer"), ("ns2:align","right") ) + For attribute names with a namespace prefix, you must use the second form. Attribute + names are matched insensitive to upper/lower case. + + To verify that the attribute exists, but without specifying a value, pass + C{withAttribute.ANY_VALUE} as the value. + """ + if args: + attrs = args[:] + else: + attrs = attrDict.items() + attrs = [(k,v) for k,v in attrs] + def pa(s,l,tokens): + for attrName,attrValue in attrs: + if attrName not in tokens: + raise ParseException(s,l,"no matching attribute " + attrName) + if attrValue != withAttribute.ANY_VALUE and tokens[attrName] != attrValue: + raise ParseException(s,l,"attribute '%s' has value '%s', must be '%s'" % + (attrName, tokens[attrName], attrValue)) + return pa +withAttribute.ANY_VALUE = object() + +opAssoc = _Constants() +opAssoc.LEFT = object() +opAssoc.RIGHT = object() + +def operatorPrecedence( baseExpr, opList ): + """Helper method for constructing grammars of expressions made up of + operators working in a precedence hierarchy. Operators may be unary or + binary, left- or right-associative. Parse actions can also be attached + to operator expressions. + + Parameters: + - baseExpr - expression representing the most basic element for the nested + - opList - list of tuples, one for each operator precedence level in the + expression grammar; each tuple is of the form + (opExpr, numTerms, rightLeftAssoc, parseAction), where: + - opExpr is the pyparsing expression for the operator; + may also be a string, which will be converted to a Literal; + if numTerms is 3, opExpr is a tuple of two expressions, for the + two operators separating the 3 terms + - numTerms is the number of terms for this operator (must + be 1, 2, or 3) + - rightLeftAssoc is the indicator whether the operator is + right or left associative, using the pyparsing-defined + constants opAssoc.RIGHT and opAssoc.LEFT. 
+ - parseAction is the parse action to be associated with + expressions matching this operator expression (the + parse action tuple member may be omitted) + """ + ret = Forward() + lastExpr = baseExpr | ( Suppress('(') + ret + Suppress(')') ) + for i,operDef in enumerate(opList): + opExpr,arity,rightLeftAssoc,pa = (operDef + (None,))[:4] + if arity == 3: + if opExpr is None or len(opExpr) != 2: + raise ValueError("if numterms=3, opExpr must be a tuple or list of two expressions") + opExpr1, opExpr2 = opExpr + thisExpr = Forward()#.setName("expr%d" % i) + if rightLeftAssoc == opAssoc.LEFT: + if arity == 1: + matchExpr = FollowedBy(lastExpr + opExpr) + Group( lastExpr + OneOrMore( opExpr ) ) + elif arity == 2: + if opExpr is not None: + matchExpr = FollowedBy(lastExpr + opExpr + lastExpr) + Group( lastExpr + OneOrMore( opExpr + lastExpr ) ) + else: + matchExpr = FollowedBy(lastExpr+lastExpr) + Group( lastExpr + OneOrMore(lastExpr) ) + elif arity == 3: + matchExpr = FollowedBy(lastExpr + opExpr1 + lastExpr + opExpr2 + lastExpr) + \ + Group( lastExpr + opExpr1 + lastExpr + opExpr2 + lastExpr ) + else: + raise ValueError("operator must be unary (1), binary (2), or ternary (3)") + elif rightLeftAssoc == opAssoc.RIGHT: + if arity == 1: + # try to avoid LR with this extra test + if not isinstance(opExpr, Optional): + opExpr = Optional(opExpr) + matchExpr = FollowedBy(opExpr.expr + thisExpr) + Group( opExpr + thisExpr ) + elif arity == 2: + if opExpr is not None: + matchExpr = FollowedBy(lastExpr + opExpr + thisExpr) + Group( lastExpr + OneOrMore( opExpr + thisExpr ) ) + else: + matchExpr = FollowedBy(lastExpr + thisExpr) + Group( lastExpr + OneOrMore( thisExpr ) ) + elif arity == 3: + matchExpr = FollowedBy(lastExpr + opExpr1 + thisExpr + opExpr2 + thisExpr) + \ + Group( lastExpr + opExpr1 + thisExpr + opExpr2 + thisExpr ) + else: + raise ValueError("operator must be unary (1), binary (2), or ternary (3)") + else: + raise ValueError("operator must indicate right or left associativity") + if pa: + matchExpr.setParseAction( pa ) + thisExpr << ( matchExpr | lastExpr ) + lastExpr = thisExpr + ret << lastExpr + return ret + +dblQuotedString = Regex(r'"(?:[^"\n\r\\]|(?:"")|(?:\\x[0-9a-fA-F]+)|(?:\\.))*"').setName("string enclosed in double quotes") +sglQuotedString = Regex(r"'(?:[^'\n\r\\]|(?:'')|(?:\\x[0-9a-fA-F]+)|(?:\\.))*'").setName("string enclosed in single quotes") +quotedString = Regex(r'''(?:"(?:[^"\n\r\\]|(?:"")|(?:\\x[0-9a-fA-F]+)|(?:\\.))*")|(?:'(?:[^'\n\r\\]|(?:'')|(?:\\x[0-9a-fA-F]+)|(?:\\.))*')''').setName("quotedString using single or double quotes") +unicodeString = Combine(_L('u') + quotedString.copy()) + +def nestedExpr(opener="(", closer=")", content=None, ignoreExpr=quotedString.copy()): + """Helper method for defining nested lists enclosed in opening and closing + delimiters ("(" and ")" are the default). + + Parameters: + - opener - opening character for a nested list (default="("); can also be a pyparsing expression + - closer - closing character for a nested list (default=")"); can also be a pyparsing expression + - content - expression for items within the nested lists (default=None) + - ignoreExpr - expression for ignoring opening and closing delimiters (default=quotedString) + + If an expression is not provided for the content argument, the nested + expression will capture all whitespace-delimited content between delimiters + as a list of separate values. 
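    For illustration, a small usage sketch with the default delimiters (the sample
    input is arbitrary)::
        data = nestedExpr().parseString("(a (b c) (d (e f)))")
        # data.asList() -> [['a', ['b', 'c'], ['d', ['e', 'f']]]]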
+ + Use the C{ignoreExpr} argument to define expressions that may contain + opening or closing characters that should not be treated as opening + or closing characters for nesting, such as quotedString or a comment + expression. Specify multiple expressions using an C{L{Or}} or C{L{MatchFirst}}. + The default is L{quotedString}, but if no expressions are to be ignored, + then pass C{None} for this argument. + """ + if opener == closer: + raise ValueError("opening and closing strings cannot be the same") + if content is None: + if isinstance(opener,basestring) and isinstance(closer,basestring): + if len(opener) == 1 and len(closer)==1: + if ignoreExpr is not None: + content = (Combine(OneOrMore(~ignoreExpr + + CharsNotIn(opener+closer+ParserElement.DEFAULT_WHITE_CHARS,exact=1)) + ).setParseAction(lambda t:t[0].strip())) + else: + content = (empty.copy()+CharsNotIn(opener+closer+ParserElement.DEFAULT_WHITE_CHARS + ).setParseAction(lambda t:t[0].strip())) + else: + if ignoreExpr is not None: + content = (Combine(OneOrMore(~ignoreExpr + + ~Literal(opener) + ~Literal(closer) + + CharsNotIn(ParserElement.DEFAULT_WHITE_CHARS,exact=1)) + ).setParseAction(lambda t:t[0].strip())) + else: + content = (Combine(OneOrMore(~Literal(opener) + ~Literal(closer) + + CharsNotIn(ParserElement.DEFAULT_WHITE_CHARS,exact=1)) + ).setParseAction(lambda t:t[0].strip())) + else: + raise ValueError("opening and closing arguments must be strings if no content expression is given") + ret = Forward() + if ignoreExpr is not None: + ret << Group( Suppress(opener) + ZeroOrMore( ignoreExpr | ret | content ) + Suppress(closer) ) + else: + ret << Group( Suppress(opener) + ZeroOrMore( ret | content ) + Suppress(closer) ) + return ret + +def indentedBlock(blockStatementExpr, indentStack, indent=True): + """Helper method for defining space-delimited indentation blocks, such as + those used to define block statements in Python source code. + + Parameters: + - blockStatementExpr - expression defining syntax of statement that + is repeated within the indented block + - indentStack - list created by caller to manage indentation stack + (multiple statementWithIndentedBlock expressions within a single grammar + should share a common indentStack) + - indent - boolean indicating whether block must be indented beyond the + the current level; set to False for block of left-most statements + (default=True) + + A valid block must contain at least one C{blockStatement}. 
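    A rough usage sketch (the mini-grammar below is purely illustrative)::
        indentStack = [1]
        stmt = Forward()
        identifier = Word(alphas, alphanums + "_")
        funcDef = Group( Keyword("def") + identifier + Suppress("()") + Suppress(":") +
                         indentedBlock(stmt, indentStack) )
        stmt << ( funcDef | identifier )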
+ """ + def checkPeerIndent(s,l,t): + if l >= len(s): return + curCol = col(l,s) + if curCol != indentStack[-1]: + if curCol > indentStack[-1]: + raise ParseFatalException(s,l,"illegal nesting") + raise ParseException(s,l,"not a peer entry") + + def checkSubIndent(s,l,t): + curCol = col(l,s) + if curCol > indentStack[-1]: + indentStack.append( curCol ) + else: + raise ParseException(s,l,"not a subentry") + + def checkUnindent(s,l,t): + if l >= len(s): return + curCol = col(l,s) + if not(indentStack and curCol < indentStack[-1] and curCol <= indentStack[-2]): + raise ParseException(s,l,"not an unindent") + indentStack.pop() + + NL = OneOrMore(LineEnd().setWhitespaceChars("\t ").suppress()) + INDENT = Empty() + Empty().setParseAction(checkSubIndent) + PEER = Empty().setParseAction(checkPeerIndent) + UNDENT = Empty().setParseAction(checkUnindent) + if indent: + smExpr = Group( Optional(NL) + + #~ FollowedBy(blockStatementExpr) + + INDENT + (OneOrMore( PEER + Group(blockStatementExpr) + Optional(NL) )) + UNDENT) + else: + smExpr = Group( Optional(NL) + + (OneOrMore( PEER + Group(blockStatementExpr) + Optional(NL) )) ) + blockStatementExpr.ignore(_bslash + LineEnd()) + return smExpr + +alphas8bit = srange(r"[\0xc0-\0xd6\0xd8-\0xf6\0xf8-\0xff]") +punc8bit = srange(r"[\0xa1-\0xbf\0xd7\0xf7]") + +anyOpenTag,anyCloseTag = makeHTMLTags(Word(alphas,alphanums+"_:")) +commonHTMLEntity = Combine(_L("&") + oneOf("gt lt amp nbsp quot").setResultsName("entity") +";").streamline() +_htmlEntityMap = dict(zip("gt lt amp nbsp quot".split(),'><& "')) +replaceHTMLEntity = lambda t : t.entity in _htmlEntityMap and _htmlEntityMap[t.entity] or None + +# it's easy to get these comment structures wrong - they're very common, so may as well make them available +cStyleComment = Regex(r"/\*(?:[^*]*\*+)+?/").setName("C style comment") + +htmlComment = Regex(r"") +restOfLine = Regex(r".*").leaveWhitespace() +dblSlashComment = Regex(r"\/\/(\\\n|.)*").setName("// comment") +cppStyleComment = Regex(r"/(?:\*(?:[^*]*\*+)+?/|/[^\n]*(?:\n[^\n]*)*?(?:(?" 
+ str(tokenlist)) + print ("tokens = " + str(tokens)) + print ("tokens.columns = " + str(tokens.columns)) + print ("tokens.tables = " + str(tokens.tables)) + print (tokens.asXML("SQL",True)) + except ParseBaseException: + err = sys.exc_info()[1] + print (teststring + "->") + print (err.line) + print (" "*(err.column-1) + "^") + print (err) + print() + + selectToken = CaselessLiteral( "select" ) + fromToken = CaselessLiteral( "from" ) + + ident = Word( alphas, alphanums + "_$" ) + columnName = delimitedList( ident, ".", combine=True ).setParseAction( upcaseTokens ) + columnNameList = Group( delimitedList( columnName ) )#.setName("columns") + tableName = delimitedList( ident, ".", combine=True ).setParseAction( upcaseTokens ) + tableNameList = Group( delimitedList( tableName ) )#.setName("tables") + simpleSQL = ( selectToken + \ + ( '*' | columnNameList ).setResultsName( "columns" ) + \ + fromToken + \ + tableNameList.setResultsName( "tables" ) ) + + test( "SELECT * from XYZZY, ABC" ) + test( "select * from SYS.XYZZY" ) + test( "Select A from Sys.dual" ) + test( "Select AA,BB,CC from Sys.dual" ) + test( "Select A, B, C from Sys.dual" ) + test( "Select A, B, C from Sys.dual" ) + test( "Xelect A, B, C from Sys.dual" ) + test( "Select A, B, C frox Sys.dual" ) + test( "Select" ) + test( "Select ^^^ frox Sys.dual" ) + test( "Select A, B, C from Sys.dual, Table2 " ) diff --git a/scripts/xml2aloe/__init__.py b/scripts/xml2aloe/__init__.py new file mode 100644 index 000000000..78baf7aaf --- /dev/null +++ b/scripts/xml2aloe/__init__.py @@ -0,0 +1,94 @@ +import shutil +import os + +def MakeModule(m): + dir = os.path.dirname(__file__) + + tdir = os.path.join(dir, 'template') + ddir = tdir + m.directory_name + print 'Creating new directory ' + ddir + + shutil.copytree(tdir,ddir) + + WriteAloeC(m,tdir + '/src/template.c',\ + ddir + '/src/' + m.name + '.c') + WriteAloeH(m,tdir + '/src/template.h',\ + ddir + '/src/' + m.name + '.h') + + +def WriteInitParamsGet(m, f): + for p in m.init_params: + f.write('\tif (param_get_' + p['type'] + '_name("' + p['name'] + '", &' + p['variable']+ ')) {\n' + '\t\t' + p['variable'] + ' = ' + str(p['default']) + ';\n'\ + '\t}\n') + +def WriteInputParamsHandlers(m, f): + for p in m.input_params: + f.write('pmid_t ' + p['name'] + '_id;\n') + +def WriteInputParamsId(m, f): + for p in m.input_params: + f.write('\t' + p['name'] + '_id = param_id("' + p['name'] + '");\n') + +def WriteInputParamsGet(m, f): + for p in m.input_params: + f.write('\tif (param_get_' + p['type'] + '(' + p['name'] + '_id, &' + p['variable'] + ') != 1) {\n') + if p['default'] == None: + f.write('\t\t' + r'moderror("Error getting parameter ' + p['name'] + '\\n");' + '\n') + f.write('\t\treturn -1;\n\t}\n') + else: + f.write('\t\t' + r'modinfo("Parameter ' + p['name'] + ' not found. 
Setting to default (' + p['default'] + ')\\n");' + '\n') + f.write('\t\t' + p['variable'] + ' = ' + p['default'] + ';\n\t}\n') + + +def WriteAloeC(m, input, output): + with open(input,'r') as f: + newlines = [] + for line in f.readlines(): + newlines.append(line.replace('-name-', m.name)) + with open(output, 'w') as f: + for line in newlines: + + if '--input_parameters_handlers--' in line: + WriteInputParamsHandlers(m, f) + elif '--input_parameters_getid--' in line: + WriteInputParamsId(m, f) + elif '--init_parameters--' in line: + WriteInitParamsGet(m, f) + elif '--input_parameters--' in line: + WriteInputParamsGet(m, f) + else: + f.write(line) + +def WriteAloeH(m, input, output): + with open(input,'r') as f: + newlines = [] + for line in f.readlines(): + if '-typeinput-' in line: + if m.nof_inputs > 0: + newlines.append(line.replace('-typeinput-', m.input_type)) + elif '-numinputs-' in line: + newlines.append(line.replace('-numinputs-', str(m.nof_inputs))) + elif '-sizeinput-' in line: + if m.nof_inputs > 0: + newlines.append(line.replace('-sizeinput-', m.input_size)) + else: + newlines.append(line.replace('-sizeinput-', '0')) + elif '-typeoutput-' in line: + if m.nof_outputs > 0: + newlines.append(line.replace('-typeoutput-', m.output_type)) + elif '-numoutputs-' in line: + newlines.append(line.replace('-numoutputs-', str(m.nof_outputs))) + elif '-sizeoutput-' in line: + if m.nof_outputs > 0: + newlines.append(line.replace('-sizeoutput-', m.output_size)) + else: + newlines.append(line.replace('-sizeoutput-', '0')) + else: + newlines.append(line) + + with open(output, 'w') as f: + for line in newlines: + f.write(line) + + \ No newline at end of file diff --git a/scripts/xml2aloe/__init__.pyc b/scripts/xml2aloe/__init__.pyc new file mode 100644 index 000000000..cf1f75581 Binary files /dev/null and b/scripts/xml2aloe/__init__.pyc differ diff --git a/scripts/xml2aloe/template/CMakeLists.txt b/scripts/xml2aloe/template/CMakeLists.txt new file mode 100644 index 000000000..48fbe4769 --- /dev/null +++ b/scripts/xml2aloe/template/CMakeLists.txt @@ -0,0 +1,94 @@ +# This configuration is for the aloe++ skeleton + +# set-up the program libraries here +set(LIBRARIES m rt) + +# set-up program includes here +include_directories(/usr/local/include/) + +############## DO NOT NEED TO MODIFY BEYOND HERE + +get_filename_component(module ${CMAKE_CURRENT_SOURCE_DIR} NAME) + +if( CMAKE_SOURCE_DIR STREQUAL CMAKE_CURRENT_SOURCE_DIR ) + cmake_minimum_required (VERSION 2.6) + project (${module}) + + # The version number. 
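# For reference, a rough sketch of driving the xml2aloe generator from Python;
# the module-description object and its attribute names (name, directory_name,
# init_params, input_params, nof_inputs, ...) follow what
# scripts/xml2aloe/__init__.py reads, and the concrete values are hypothetical:
#
#   from xml2aloe import MakeModule          # assumes scripts/ is on PYTHONPATH
#
#   class ModuleDesc(object):
#       pass
#   m = ModuleDesc()
#   m.name = 'my_module'                     # hypothetical module name
#   m.directory_name = '/my_module'          # appended to the template directory
#   m.init_params = [{'name': 'gain', 'type': 'float',
#                     'variable': 'gain', 'default': 1.0}]
#   m.input_params = []
#   m.nof_inputs, m.input_type, m.input_size = 1, 'float', '1024'
#   m.nof_outputs, m.output_type, m.output_size = 1, 'float', '1024'
#   MakeModule(m)                            # copies template/ and fills in -name- etc.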
+ set (OECORE_VERSION_MAJOR 1) + set (OECORE_VERSION_MINOR 0) + set(MODULE_REPOS_NAME "default") + +else() + include_directories(${OESR_INCLUDE}) +endif() + + +file(GLOB_RECURSE SOURCES "src/*.c") +file(GLOB_RECURSE TEST_SOURCES "test/*.c") + +include_directories(${CMAKE_CURRENT_SOURCE_DIR}/src) + +# aloe module +add_library(${module}-aloe SHARED ${SOURCES}) +set_target_properties(${module}-aloe PROPERTIES OUTPUT_NAME ${module}) +set_target_properties(${module}-aloe PROPERTIES COMPILE_FLAGS "-D_COMPILE_ALOE") +target_link_libraries(${module}-aloe oesrapi skeleton ${LIBRARIES}) +install(TARGETS ${module}-aloe DESTINATION lib/${MODULE_REPOS_NAME}/) + + +if (NOT ${TEST_SOURCES} STREQUAL "") + # standalone program for testing + add_executable(${module}-bin ${SOURCES} ${TEST_SOURCES}) + set_target_properties(${module}-bin PROPERTIES OUTPUT_NAME ${module}) + set_target_properties(${module}-bin PROPERTIES COMPILE_FLAGS "-D_COMPILE_STANDALONE") + target_link_libraries(${module}-bin standalone ${LIBRARIES}) + install(TARGETS ${module}-bin DESTINATION bin) +endif() + +# octave mex file +set(install_mex "") +if(NOT $ENV{OCTAVE_INCLUDE} STREQUAL "") + if(NOT $ENV{OCTAVE_LIBS} STREQUAL "") + + add_library(${module}-oct SHARED ${SOURCES}) + set_target_properties(${module}-oct PROPERTIES OUTPUT_NAME ${module}) + set_target_properties(${module}-oct PROPERTIES PREFIX "am_") + set_target_properties(${module}-oct PROPERTIES SUFFIX .mex) + + set_target_properties(${module}-oct PROPERTIES COMPILE_FLAGS "-I$ENV{OCTAVE_INCLUDE} -D_COMPILE_MEX -Wl,-Bsymbolic -L$ENV{OCTAVE_LIBS} -loctinterp -loctave -lcruft -Wl,-Bsymbolic-functions -Wl,-z,relro") + target_link_libraries(${module}-oct aloe_octave ${LIBRARIES}) + install(TARGETS ${module}-oct DESTINATION mex) + + endif() +endif() + +#matlab mex +if(NOT $ENV{MATLAB_ROOT} STREQUAL "") + add_library(${module}-mat SHARED ${SOURCES}) + set_target_properties(${module}-mat PROPERTIES OUTPUT_NAME ${module}) + set_target_properties(${module}-mat PROPERTIES PREFIX "am_") + + if(CMAKE_SIZEOF_VOID_P EQUAL 8) + set_target_properties(${module}-mat PROPERTIES SUFFIX .mexa64) + set_target_properties(${module}-mat PROPERTIES COMPILE_FLAGS "-I$ENV{MATLAB_ROOT} -O -pthread -shared -Wl,--version-script,$ENV{MATLAB_ROOT}/extern/lib/glnxa64/mexFunction.map -Wl,--no-undefined -Wl,-rpath-link,$ENV{MATLAB_ROOT}/bin/glnxa64 -L$ENV{MATLAB_ROOT}/bin/glnxa64 -lmx -lmex -lmat -lm -lstdc++") + else() + set_target_properties(${module}-mat PROPERTIES SUFFIX .mexglx) + set_target_properties(${module}-mat PROPERTIES COMPILE_FLAGS "-I$ENV{MATLAB_ROOT} -O -pthread -shared -m32 -Wl,--version-script,$ENV{MATLAB_ROOT}/extern/lib/glnx86/mexFunction.map -Wl,--no-undefined -Wl,-rpath-link,$ENV{MATLAB_ROOT}/bin/glnx86 -L$ENV{MATLAB_ROOT}/bin/glnx86 -lmx -lmex -lmat -lm -lstdc++") + endif() + + target_link_libraries(${module}-mat aloe_matlab ${LIBRARIES}) + install(TARGETS ${module}-mat DESTINATION mex) + +endif() + + + + + + + + + + + diff --git a/scripts/xml2aloe/template/src/template.c b/scripts/xml2aloe/template/src/template.c new file mode 100644 index 000000000..a96676b01 --- /dev/null +++ b/scripts/xml2aloe/template/src/template.c @@ -0,0 +1,73 @@ +/* + * This file has been automatically generated from -name- + */ + +#include +#include +#include +#include + +#include "-name-.h" + +-name-_hl -name-; + +--input_parameters_handlers-- + +int out_len[NOF_OUTPUT_ITF]; + +int initialize() { + + /* Initialization Parameters */ + --init_parameters-- + + /* Input Control Parameters */ + --input_parameters_getid-- 
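	/*
	 * For reference, the xml2aloe generator replaces the two placeholders above
	 * with code of roughly the following shape; the parameter name "gain", its
	 * float type and its default value are hypothetical, not defined here:
	 *
	 *   if (param_get_float_name("gain", &gain)) {
	 *       gain = 1.0;
	 *   }
	 *   gain_id = param_id("gain");
	 */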
+ + /* Initialization function */ + return -name-_initialize(&-name-); +} + + +int work(void **inp, void **out) { + int i,n; +#if NOF_INPUTS>1 + for (i=0;i1 + for (i=0;i + +typedef -typeinput- input_t; +typedef -typeoutput- output_t; + +#define INPUT_MAX_SAMPLES -sizeinput- +#define OUTPUT_MAX_SAMPLES -sizeoutput- + +#define NOF_INPUT_ITF -numinputs- +#define NOF_OUTPUT_ITF -numoutputs- + +#endif +/**@} */ + +#ifndef INCLUDE_DEFS_ONLY + +/* Input and output buffer sizes (in number of samples) */ +const int input_max_samples = INPUT_MAX_SAMPLES; +const int output_max_samples = OUTPUT_MAX_SAMPLES; + +/* leave these two lines unmodified */ +const int input_sample_sz = sizeof(input_t); +int output_sample_sz = sizeof(output_t); + +/* Number of I/O interfaces. All have the same maximum size */ +const int nof_input_itf = NOF_INPUT_ITF; +const int nof_output_itf = NOF_OUTPUT_ITF; + +#endif diff --git a/scripts/xml2aloe/template/test/test_generate.c b/scripts/xml2aloe/template/test/test_generate.c new file mode 100644 index 000000000..c21002790 --- /dev/null +++ b/scripts/xml2aloe/template/test/test_generate.c @@ -0,0 +1,73 @@ +/* + * Copyright (c) 2013, Ismael Gomez-Miguelez . + * This file is part of ALOE++ (http://flexnets.upc.edu/) + * + * ALOE++ is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * ALOE++ is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with ALOE++. If not, see . + */ + +/* Functions that generate the test data fed into the DSP modules being developed */ +#include +#include +#include + +#include +#include + +#define INCLUDE_DEFS_ONLY +#include "template.h" + +int offset=0; + +/** + * Generates input signal. VERY IMPORTANT to fill length vector with the number of + * samples that have been generated. + * @param inp Input interface buffers. Data from other interfaces is stacked in the buffer. + * Use in(ptr,idx) to access the address. 
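 *
 * A minimal sketch of a generator body (the ramp pattern and the nof_samples
 * count are illustrative):
 *
 *   lengths[0] = nof_samples;
 *   for (i = 0; i < nof_samples; i++) {
 *       ((input_t *) in)[i] = (input_t) i;   // any deterministic test pattern
 *   }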
+ *
+ * @param lengths Save in the n-th position the number of samples generated for the n-th interface
+ */
+int generate_input_signal(void *in, int *lengths)
+{
+	int i;
+	input_t *input = in;
+	int block_length;
+	pmid_t blen_id;
+
+	blen_id = param_id("block_length");
+	if (!blen_id) {
+		moderror("Parameter block_length not found\n");
+		return -1;
+	}
+	if (!param_get_int(blen_id,&block_length)) {
+		moderror("Getting integer parameter block_length\n");
+		return -1;
+	}
+
+	modinfo_msg("Parameter block_length is %d\n",block_length);
+
+	/** HERE INDICATE THE LENGTH OF THE SIGNAL */
+	lengths[0] = block_length;
+
+	for (i=0;i
+
+int uhd_open(char *args, void **handler);
+int uhd_close(void *h);
+int uhd_start_rx_stream(void *h);
+int uhd_start_rx_stream_nsamples(void *h, int nsamples);
+int uhd_stop_rx_stream(void *h);
+
+bool uhd_rx_wait_lo_locked(void *h);
+double uhd_set_rx_srate(void *h, double freq);
+double uhd_set_rx_gain(void *h, double gain);
+
+double uhd_set_rx_freq(void *h, double freq);
+
+int uhd_recv(void *h, void *data, int nsamples, int blocking);
+
+
+#ifdef __cplusplus
+}
+#endif
diff --git a/uhd/uhd_handler.hpp b/uhd/uhd_handler.hpp
new file mode 100644
index 000000000..633ef00d0
--- /dev/null
+++ b/uhd/uhd_handler.hpp
@@ -0,0 +1,9 @@
+#include
+
+class uhd_handler {
+public:
+	uhd::usrp::multi_usrp::sptr usrp;
+	uhd::rx_streamer::sptr rx_stream;
+	bool rx_stream_enable;
+
+};
diff --git a/uhd/uhd_imp.cpp b/uhd/uhd_imp.cpp
new file mode 100644
index 000000000..69d285b7c
--- /dev/null
+++ b/uhd/uhd_imp.cpp
@@ -0,0 +1,140 @@
+#include
+#include
+#include
+#include
+#include
+
+void my_handler(uhd::msg::type_t type, const std::string &msg){
+	//handle the message...
+}
+
+#include "uhd_handler.hpp"
+#include "uhd.h"
+
+typedef _Complex float complex_t;
+
+#define SAMPLE_SZ sizeof(complex_t)
+
+void uhd_rx_stream(void *h);
+
+bool isLocked(void *h)
+{
+	uhd_handler* handler = static_cast<uhd_handler*>(h);
+	return handler->usrp->get_rx_sensor("lo_locked", 0).to_bool();
+}
+
+bool uhd_rx_wait_lo_locked(void *h)
+{
+	double report = 0.0;
+	while(!isLocked(h) && report < 3.0)
+	{
+		report += 0.1;
+		usleep(1000);
+	}
+	return isLocked(h);
+}
+
+int uhd_start_rx_stream(void *h) {
+	uhd_handler* handler = static_cast<uhd_handler*>(h);
+	uhd::stream_cmd_t cmd(uhd::stream_cmd_t::STREAM_MODE_START_CONTINUOUS);
+	cmd.time_spec = handler->usrp->get_time_now();
+	cmd.stream_now = true;
+	handler->usrp->issue_stream_cmd(cmd);
+	return 0;
+}
+
+int uhd_stop_rx_stream(void *h) {
+	uhd_handler* handler = static_cast<uhd_handler*>(h);
+	uhd::stream_cmd_t cmd(uhd::stream_cmd_t::STREAM_MODE_STOP_CONTINUOUS);
+	cmd.time_spec = handler->usrp->get_time_now();
+	cmd.stream_now = true;
+	handler->usrp->issue_stream_cmd(cmd);
+	return 0;
+}
+
+int uhd_start_rx_stream_nsamples(void *h, int nsamples) {
+	uhd_handler* handler = static_cast<uhd_handler*>(h);
+	uhd::stream_cmd_t cmd(uhd::stream_cmd_t::STREAM_MODE_NUM_SAMPS_AND_MORE);
+	cmd.time_spec = handler->usrp->get_time_now();
+	cmd.stream_now = true;
+	cmd.num_samps = nsamples;
+	handler->usrp->issue_stream_cmd(cmd);
+	return 0;
+}
+
+
+
+int uhd_open(char *args, void **h) {
+	uhd_handler* handler = new uhd_handler();
+	std::string _args=std::string(args);
+	handler->usrp = uhd::usrp::multi_usrp::make(_args);
+
+	uhd::msg::register_handler(&my_handler);
+
+	std::string otw, cpu;
+	otw="sc16";
+	cpu="fc32";
+
+	handler->usrp->set_clock_source("internal");
+
+	uhd::stream_args_t stream_args(cpu, otw);
+//	stream_args.channels.push_back(0);
+//	stream_args.args["noclear"] = "1";
+
+	handler->rx_stream = handler->usrp->get_rx_stream(stream_args);
+	*h = handler;
+
+	int size = 10000*handler->rx_stream->get_max_num_samps();
+
+	return 0;
+}
+
+int uhd_close(void *h) {
+	uhd_handler* handler = static_cast<uhd_handler*>(h);
+	return 0;
+}
+
+
+double uhd_set_rx_srate(void *h, double freq) {
+	uhd_handler* handler = static_cast<uhd_handler*>(h);
+	handler->usrp->set_rx_rate(freq);
+	double ret = handler->usrp->get_rx_rate();
+	return ret;
+}
+
+double uhd_set_rx_gain(void *h, double gain) {
+	uhd_handler* handler = static_cast<uhd_handler*>(h);
+	handler->usrp->set_rx_gain(gain);
+	return handler->usrp->get_rx_gain();
+}
+
+float uhd_get_rx_srate(void *h) {
+	uhd_handler* handler = static_cast<uhd_handler*>(h);
+	return handler->usrp->get_rx_rate();
+}
+
+double uhd_set_rx_freq(void *h, double freq) {
+	uhd_handler* handler = static_cast<uhd_handler*>(h);
+	handler->usrp->set_rx_freq(freq);
+	return handler->usrp->get_rx_freq();
+}
+
+int uhd_recv(void *h, void *data, int nsamples, int blocking) {
+	uhd_handler* handler = static_cast<uhd_handler*>(h);
+	uhd::rx_metadata_t md;
+	if (blocking) {
+		int n=0,p;
+		complex_t *data_c = (complex_t*) data;
+		do {
+			p=handler->rx_stream->recv(&data_c[n], nsamples-n, md);
+			if (p == -1) {
+				return -1;
+			}
+			n+=p;
+		} while(n<nsamples);
+		return nsamples;
+	} else {
+		return handler->rx_stream->recv(data, nsamples, md, 0.0);
+	}
+}
diff --git a/uhd/uhd_utils.c b/uhd/uhd_utils.c
new file mode 100644
index 000000000..173164cb5
--- /dev/null
+++ b/uhd/uhd_utils.c
@@ -0,0 +1,47 @@
+#include
+#include
+#include
+#include
+
+#include "uhd.h"
+#include "utils/vector.h"
+
+int uhd_rssi_scan(void *uhd, float *freqs, float *rssi, int nof_bands, double fs, int nsamp) {
+	int i;
+	int ret = -1;
+	_Complex float *buffer;
+	double f;
+
+	buffer = calloc(nsamp, sizeof(_Complex float));
+	if (!buffer) {
+		goto free_and_exit;
+	}
+
+	uhd_set_rx_gain(uhd, 0.0);
+	uhd_set_rx_srate(uhd, fs);
+
+	for (i=0;i
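/*
 * A minimal usage sketch of the C wrapper declared in uhd.h (error handling is
 * omitted; the device arguments, rates and buffer size are illustrative):
 *
 *   void *uhd;
 *   _Complex float buf[1024];
 *   if (!uhd_open("", &uhd)) {
 *       uhd_set_rx_srate(uhd, 1e6);
 *       uhd_set_rx_gain(uhd, 20.0);
 *       uhd_set_rx_freq(uhd, 900e6);
 *       uhd_start_rx_stream(uhd);
 *       uhd_recv(uhd, buf, 1024, 1);   // blocking read of 1024 samples
 *       uhd_stop_rx_stream(uhd);
 *       uhd_close(uhd);
 *   }
 */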