#include <stdio.h>
#include <time.h>
#include <assert.h>     /* for assert() used below */
#include <sys/types.h>

#include "ipc.h"
#include "cmdparam.h"
#include "ind_types.h"
#include "lissom.h"
#include "globals.h"
#include "kernel.h"
#include "analyze.h"
#include "binarysave.h"


#define LF_BUFFER_VERIFY

#define NEURONS_PER_BUFFER 1

#define PARTS_PER_ROW (NMAX/NEURONS_PER_BUFFER)
#define parts_per_row (N/NEURONS_PER_BUFFER)

typedef f32 LFFloatType;
#define LF_FLOAT_SIZE sizeof(LFFloatType)

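/* A token is two 32-bit words: the left word packs the token name (high
   16 bits) and the token length in 32-bit items (low 16 bits); the right
   word packs two 16-bit payload fields, infoa and infob. */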
typedef struct {
  i32 right;
  i32 left;
} LFTokenType;


#define SMALLEST_LFTOK          1
#define LFTOK_NO_TOKEN          0
#define LFTOK_SNAPSHOT_HEADER   1
#define LFTOK_ROW_HEADER        2
#define LFTOK_NEURON_HEADER_OLD 3
#define LFTOK_AFF_WEIGHTS       4
#define LFTOK_LEXC_WEIGHTS_OLD  5
#define LFTOK_LINH_WEIGHT_CHUNK 6
#define LFTOK_NEURON_TRAILER    7
#define LFTOK_SNAPSHOT_TRAILER  8
#define LFTOK_LEXC_WEIGHTS      9
#define LFTOK_NEURON_HEADER     10
#define LARGEST_LFTOK           10

#define LF_UNUSED 0x00

#define LF_ALL_EYES 99

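/* Default size (in 32-bit items) of the staging buffer: with 2% slack,
   room for one row part's worth of afferent (two eyes) plus lateral
   excitatory and inhibitory weights and token overhead. */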
#ifndef LF_BUFFER_SIZE
#define LF_BUFFER_SIZE \
  (int)(1.02 * (NMAX/PARTS_PER_ROW) * ( 2*WTMAX*WTMAX + MAX_EXC_DIMENSION + 1.0*MAX_INH_DIMENSION ))
#endif

#define LF_TOKEN_GRAIN_SIZE 4

typedef i32 LFBufferDataType;
#define LFBufferDataType_swap_endian i32_swap_endian

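/* In-memory staging buffer: data[start..end] holds the 32-bit items not
   yet consumed; clearing sets start=0, end=-1 (length 0). */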
typedef struct
{
  int start;
  int end;
  LFBufferDataType data[LF_BUFFER_SIZE];
} LFBuffer;

#define LF_FLOAT_TOLERANCE 0.0000001

#define LF_NO_ERROR  0
#define LF_ERROR    -1

#define LF_MAXWARNINGS 5

int save_afferent_weights_only = False;
int load_afferent_weights_only = False;

int tokensWritten = 0;
int tokensRead    = 0;
LFBuffer binaryBuffer;
int status = 0;
int errorsEncountered = 0;

int bigendian = -1;

LFTokenType makeToken(int name, int length, int infoa, int infob );
int  tokenName( LFTokenType token );
int  tokenLength( LFTokenType token );
int  tokenInfoa( LFTokenType token );
int  tokenInfob( LFTokenType token );
void printToken( LFTokenType token, int offset );
const char *tokenNameString(LFTokenType token);

int writeToken(FILE *file, LFTokenType token );
LFTokenType peekToken(FILE *file );
LFTokenType readToken(FILE *file );
LFTokenType readTokenExpecting(int tokenNameExpected, FILE *file);
int writeItem(FILE *file, void *itemptr);
LFBufferDataType readItem(FILE *file);
int writeFloat(FILE *file, f32 aFloat );
f32 readFloat(FILE *file);
int writeInteger(FILE *file, int anInteger );
int readInteger(FILE *file);
void printTokensInBuffer( LFBuffer *buffer );

int binaryWeightsFileReadInArbitraryOrder(FILE *file, LFBuffer *buffer);
int binaryWeightsFileReadInFixedOrder(FILE *file, LFBuffer *buffer);

int binaryWeightsBufferFill( LFBuffer *buffer, int current_map_row,
                             int current_local_row, int part_of_row );

int binaryWeightsBufferProcess( LFBuffer *buffer, int verify);
int binaryWeightsBufferWriteLatWeights( LFBuffer *buffer, int token_name,
                                        int i, int j, int radius, int array_width,
                                        l_weight *weights);
int binaryWeightsBufferReadLatWeights( LFBuffer *buffer, LFTokenType token,
                                       int i, int j, int radius, int array_width,
                                       l_weight *weights, int verify, const char *description);

void LFBufferGet( LFBuffer *buffer, int pe );
void LFBufferPut( LFBuffer *buffer, int pe );
void LFBufferClear( LFBuffer *buffer );
int  LFBufferLength( LFBuffer *buffer );
int  LFBufferWrite( LFBuffer *buffer, FILE *file );
int  LFBufferRead( LFBuffer *buffer, FILE *file, int length);
int  LFBufferWriteToken(LFBuffer *buffer, LFTokenType token );
LFTokenType LFBufferReadToken(LFBuffer *buffer );
int  LFBufferWriteItem(LFBuffer *buffer, void *itemptr);
LFBufferDataType LFBufferReadItem(LFBuffer *buffer);
int  LFBufferWriteFloat(LFBuffer *buffer, f32 aFloat );
f32  LFBufferReadFloat(LFBuffer *buffer);
int  LFBufferWriteInteger(LFBuffer *buffer, int anInteger );
int  LFBufferReadInteger( LFBuffer *buffer);
int compareParameter(int weight_file_parameter, int param_file_parameter, const char *description);
int compareOrSetInt(int *destination, int value, int verify, const char *description);
#define compareOrSet_proto(type) \
  type compareOrSet_ ## type(type *destination, type value, int verify, const char *description)
compareOrSet_proto(f64);
compareOrSet_proto(a_weight);
compareOrSet_proto(l_weight);


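/* Register the user-settable parameters controlling binary weight
   saving and loading. */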
void binarysave_init_hook( void )
{
  PARAM_I(PARAM_BOOL, load_afferent_weights_only,False,True,
          "If true, when loading weights, load only the afferent ones. This allows\n"
          "a machine with little memory to read in a file created on a much larger one,\n"
          "though all it can do is compute initial activations and map values which\n"
          "depend on them. Requires LF_BUFFER_SIZE to be large enough to hold at\n"
          "least one neuron's worth of data from the full network.");

  PARAM_I(PARAM_BOOL, save_afferent_weights_only,False,True,
          "If true, when saving weights, save only the afferent ones. This results\n"
          "in a much smaller file, but the resulting network is of only limited use.");
}


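/* Write the full set of weights to an open binary file.  Layout:
   SNAPSHOT_HEADER + iteration, then for each local row and row part a
   ROW_HEADER, buffer length, and buffer contents gathered from every PE
   (the youngest PE does all file I/O), and finally SNAPSHOT_TRAILER +
   iteration. */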
int binaryWeightsFileWrite( FILE *file )
{
  int current_local_row;
  int part_of_row;
  time_t start_time, end_time;

  tokensWritten = 0;
  status = 0;
  errorsEncountered = 0;
  bigendian = endianness();

  if (AMYOUNGESTPE) {
    start_time = time(NULL);
    ipc_notify(IPC_ALL,IPC_STD,"Weight file saving started %.24s", ctime(&start_time));

    writeToken( file, makeToken( LFTOK_SNAPSHOT_HEADER, 2+1, N, nrows));
    writeInteger( file, iteration);
  }

  for (current_local_row=0; current_local_row<nrows; current_local_row++) {
    for (part_of_row=0; part_of_row<parts_per_row; part_of_row++) {

      const int current_map_row = MAPROW(current_local_row);
      LFBufferClear( &binaryBuffer );
      binaryWeightsBufferFill( &binaryBuffer, current_map_row, current_local_row, part_of_row );
      if (status != 0) return status;

#ifdef LF_BUFFER_VERIFY
      tokensRead = 0;
      binaryWeightsBufferProcess( &binaryBuffer, True);
      if (status != 0) return status;
#endif

      ipc_barrier();

      if (AMYOUNGESTPE) {
        int pe;

        for (pe=(NPEs-1); pe>=0; pe--) {

          if (!PEISME(pe))
            LFBufferGet(&binaryBuffer, pe);

          writeToken( file, makeToken( LFTOK_ROW_HEADER, 2 + 1,
                                       ARBITRARY_MAPROW(current_local_row,pe), LF_UNUSED));
          writeInteger(file, LFBufferLength(&binaryBuffer));

          LFBufferWrite(&binaryBuffer,file);
        }
      }

      ipc_barrier();
    }
  }

  if (AMYOUNGESTPE) {

    writeToken( file, makeToken( LFTOK_SNAPSHOT_TRAILER, 2 + 1, LF_UNUSED, LF_UNUSED));
    writeInteger(file, iteration);

    end_time = time(NULL);

    ipc_notify(IPC_ALL,IPC_STD,"Weight saving took %g seconds, completing %.24s",
               (double)difftime(end_time, start_time),
               ctime(&end_time));
  }

  if (errorsEncountered != 0) {
    ipc_notify(IPC_ALL,IPC_ERROR,"There were %d non-fatal errors during saving",errorsEncountered);
    ipc_notify(IPC_ALL,IPC_ERROR,"Consult the log file for more information");
  }

  return LF_NO_ERROR;
}

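/* Pack one part of a map row into the buffer as a token stream: for each
   neuron a NEURON_HEADER (center position and per-eye orientation
   preference), the afferent weights for each eye, and (unless
   save_afferent_weights_only) the lateral excitatory weights plus the
   lateral inhibitory weights, the latter written as chunks of consecutive
   entries that are still above the live threshold. */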
int binaryWeightsBufferFill( LFBuffer *buffer, int current_map_row,
                             int current_local_row, int part_of_row )
{
  int current_j;
  int which_eye;

  int start_j = part_of_row * (N/parts_per_row);
  int end_j   = (part_of_row + 1) * (N/parts_per_row);

  if (N%parts_per_row != 0) {
    ipc_notify(IPC_ALL,IPC_ERROR,"N must be divisible by parts_per_row: %d%%%d != 0",
               (int)N, (int)parts_per_row);
    return LF_ERROR;
  }

  for (current_j=start_j; current_j<end_j; current_j++) {

    if (status != 0) return status;

    LFBufferWriteToken(buffer,
                       makeToken( LFTOK_NEURON_HEADER, 2 + 2 + 2*num_eyes, current_map_row, current_j));

    LFBufferWriteInteger(buffer, cortex_map[current_map_row][current_j].centerx);
    LFBufferWriteInteger(buffer, cortex_map[current_map_row][current_j].centery);
    for (which_eye = 0; which_eye < num_eyes; which_eye++) {
#ifdef NO_ANALYZE
      LFBufferWriteInteger(buffer, Uninitialized);
#else
      LFBufferWriteInteger(buffer, or_pref[which_eye][current_map_row][current_j]);
#endif
    }

    /* Afferent weights, one token per eye */
    {
      const int rf_width = 1 + 2*rf_radius;

      const double wt_scale = num_eyes/2.0;

      int x,y;

      for (which_eye = 0; which_eye < num_eyes; which_eye++) {
        LFBufferWriteToken(buffer, makeToken( LFTOK_AFF_WEIGHTS,
                                              2 + rf_width*rf_width,
                                              rf_width, which_eye));

        for (x=0; x<rf_width; x++)
          for (y=0; y<rf_width; y++)
            LFBufferWriteFloat(buffer, wt_scale *
                               wts[current_local_row][current_j].weights[which_eye][x][y]);
      }
    }

    if (!save_afferent_weights_only) {

      binaryWeightsBufferWriteLatWeights( buffer, LFTOK_LEXC_WEIGHTS,
                                          current_map_row, current_j,
                                          exc_rad, exc_array_width,
                                          wts[current_local_row][current_j].lat_exc_wts);

      /* Lateral inhibitory weights, written as chunks of consecutive
         above-threshold entries so below-threshold ones need not be stored */
      {
        int start_index=0, end_index=-1, i;
        int dynamic_lat_inh_dimension = (2*inh_rad + 1)*(2*inh_rad + 1);
        f64 live_threshold = 1.5 * DEATH_FLAG;

        if (dynamic_lat_inh_dimension != lat_inh_dimension)
          ipc_notify(IPC_ONE,IPC_ERROR,"Lateral inhibitory radius changed; not supported by binarysave.c");

        for (;;) {

          /* Skip below-threshold weights to find the start of the next chunk */
          for (start_index = end_index+1;
               ((start_index <= dynamic_lat_inh_dimension-1) &&
                (wts[current_local_row][current_j].lat_inh_wts[start_index] < live_threshold));
               start_index++);

          if (start_index >= dynamic_lat_inh_dimension)
            break;

          /* Extend the chunk until two consecutive below-threshold weights follow */
          for (end_index=start_index;
               ((end_index < dynamic_lat_inh_dimension - 2) &&
                !((wts[current_local_row][current_j].lat_inh_wts[end_index+1] < live_threshold) &&
                  (wts[current_local_row][current_j].lat_inh_wts[end_index+2] < live_threshold)));
               end_index += 2);

          if (end_index >= (dynamic_lat_inh_dimension-2))
            end_index = dynamic_lat_inh_dimension - 1;

          /* Don't end the chunk on a below-threshold weight */
          if (wts[current_local_row][current_j].lat_inh_wts[end_index] < live_threshold)
            end_index--;

          LFBufferWriteToken(buffer, makeToken( LFTOK_LINH_WEIGHT_CHUNK,
                                                2 + (end_index - start_index + 1),
                                                start_index, end_index));

          for (i=start_index; i<=end_index; i++)
            LFBufferWriteFloat(buffer, wts[current_local_row][current_j].lat_inh_wts[i]);
        }
      }
    }

    LFBufferWriteToken(buffer, makeToken( LFTOK_NEURON_TRAILER, 2, current_map_row, current_j));
  }

  return LF_NO_ERROR;
}

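/* Read a weight file produced by binaryWeightsFileWrite.  The parent PE
   checks the snapshot header against the current parameters, the row data
   is distributed to the owning PEs (in fixed or arbitrary row order), and
   the snapshot trailer is checked against the iteration number. */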
int binaryWeightsFileRead( FILE *file )
{
  LFTokenType thisToken;

  time_t start_time, end_time;

  tokensRead = 0;
  status = 0;
  errorsEncountered = 0;
  bigendian = endianness();
  LFBufferClear( &binaryBuffer );

  if (AMPARENTPE) {

    start_time = time(NULL);
    ipc_notify(IPC_ALL,IPC_STD,"Weight file loading started %.24s", ctime(&start_time));

    thisToken = readTokenExpecting(LFTOK_SNAPSHOT_HEADER, file);
    compareParameter(tokenInfoa(thisToken), N, "N" );
#ifdef ASSUME_FIXED_PE_CONFIGURATION
    compareParameter(tokenInfob(thisToken), nrows, "nrows");
#endif

    iteration = readInteger(file);
  }

#ifdef ASSUME_FIXED_PE_CONFIGURATION
  ipc_notify(IPC_ONE,IPC_VERBOSE,"Assuming fixed order of rows in binary weights file");
  binaryWeightsFileReadInFixedOrder(file, &binaryBuffer);
#else
  binaryWeightsFileReadInArbitraryOrder(file, &binaryBuffer);
#endif

  if (AMPARENTPE) {
    thisToken = readTokenExpecting(LFTOK_SNAPSHOT_TRAILER, file);
    if (readInteger(file) != iteration)
      ipc_notify(IPC_ALL,IPC_ERROR,"File truncated: End of SNAPSHOT_TRAILER missing");

    end_time = time(NULL);

    ipc_notify(IPC_ALL,IPC_STD,"Weight loading took %g seconds, completing %.24s",
               (double)difftime(end_time, start_time),
               ctime(&end_time));
  }

  if (errorsEncountered != 0) {
    ipc_notify(IPC_ALL,IPC_ERROR,"There were %d non-fatal errors during loading",errorsEncountered);
    ipc_notify(IPC_ALL,IPC_ERROR,"Consult the runfile for more information");
  }

  return LF_NO_ERROR;
}

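/* Write one token's worth of lateral weights for neuron (i,j) into the
   buffer: only the entries whose lateral connection falls inside the
   N x N map are stored. */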
int binaryWeightsBufferWriteLatWeights( LFBuffer *buffer, int token_name,
                                        int i, int j, int radius, int array_width,
                                        l_weight *weights)
{
  const int lowk  = MAX(i-radius,0  );
  const int highk = MIN(i+radius,N-1);
  const int lowl  = MAX(j-radius,0  );
  const int highl = MIN(j+radius,N-1);
  const int num   = (highk-lowk+1)*(highl-lowl+1);

  int k,l;

  LFBufferWriteToken(buffer, makeToken( token_name, 2+num, num, 0));

  for (k=lowk; k<=highk; k++) {
    const int partial_idx = PARTIAL_LAT_INDEX(i,j,k,radius,array_width);
    for (l=lowl; l<=highl; l++) {
      const int idx = FULL_LAT_INDEX(partial_idx,l);

      LFBufferWriteFloat(buffer, weights[idx]);
    }
  }

  return LF_NO_ERROR;
}

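/* Read back one token's worth of lateral weights for neuron (i,j), either
   verifying against or overwriting the values in memory.  When only the
   afferent weights are being loaded, the stored values are skipped. */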
int binaryWeightsBufferReadLatWeights( LFBuffer *buffer, LFTokenType token,
                                       int i, int j, int radius, int array_width,
                                       l_weight *weights, int verify, const char *description)
{
  const int lowk  = MAX(i-radius,0  );
  const int highk = MIN(i+radius,N-1);
  const int lowl  = MAX(j-radius,0  );
  const int highl = MIN(j+radius,N-1);
  const int num   = (highk-lowk+1)*(highl-lowl+1);

  int k,l;

  if (load_afferent_weights_only) {

    int idx;
    for (idx=0; idx<tokenInfoa(token); idx++)
      LFBufferReadFloat(buffer);
    return LF_NO_ERROR;
  }

  if (num != tokenInfoa(token))
    ipc_abort(IPC_EXIT_FILE_PROBLEM,"Problem with lateral weight encoding; current parameters may not match file");

  for (k=lowk; k<=highk; k++) {
    const int partial_idx = PARTIAL_LAT_INDEX(i,j,k,radius,array_width);
    for (l=lowl; l<=highl; l++) {
      const int idx = FULL_LAT_INDEX(partial_idx,l);
      const l_weight aFloat = LFBufferReadFloat(buffer);

      compareOrSet_l_weight(&weights[idx], aFloat, verify, description);
    }
  }

  return LF_NO_ERROR;
}

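/* Read the row data assuming it appears in exactly the order it was
   written (row-major over local rows, with one block per PE); the parent
   PE ships each block to its owning PE as it is read. */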
int binaryWeightsFileReadInFixedOrder(FILE *file, LFBuffer *buffer)
{
  int current_map_row;
  int current_local_row;
  int part_of_row;
  int row_length;
  LFTokenType thisToken;

  for (current_local_row=0; current_local_row<nrows; current_local_row++) {
    for (part_of_row=0; part_of_row<parts_per_row; part_of_row++) {

      if (AMPARENTPE) {
        int pe;
        int rowsRead = 0;

        do
        {
          thisToken = readTokenExpecting(LFTOK_ROW_HEADER, file);

          current_map_row = tokenInfoa(thisToken);
          row_length = readInteger(file);
          LFBufferClear( buffer );
          LFBufferRead( buffer, file, row_length);

          if ( current_local_row != LOCALROW(current_map_row)) {
            status = LF_ERROR;
            ipc_notify(IPC_ALL,IPC_ERROR,"Rows are not in the correct major order in file");
          }
          pe = PEFORROW(current_map_row);
          if (!PEISME(pe))
            LFBufferPut( buffer, pe);

          rowsRead++;

        } while (!PEISME(pe));

        if ( rowsRead != NPEs) {
          status = LF_ERROR;
          ipc_notify(IPC_ALL,IPC_ERROR,"Rows are not in the correct minor order in file");
        }
      }

      ipc_barrier();

#ifdef CRAY
#pragma _CRI suppress (buffer)
#endif

      binaryWeightsBufferProcess( buffer, False);

      ipc_barrier();

      if (status != 0) return status;
    }
  }

  return LF_NO_ERROR;
}

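/* Read the row data without assuming any particular row order: the parent
   PE keeps reading ROW_HEADER blocks and shipping each one to its owning
   PE until it would hand a PE a second block, then every PE processes the
   block it was given; repeat until something other than a ROW_HEADER is
   encountered. */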
int binaryWeightsFileReadInArbitraryOrder(FILE *file, LFBuffer *buffer)
{
  static int done;
  done=0;

  while (!done) {

    ipc_barrier();

    if (AMPARENTPE) {
      LFTokenType nextToken;
      int i, ownerpe, pe_has_data[NPES];

      for (i=0; i<NPEs; i++)
        pe_has_data[i]=False;

      nextToken = peekToken(file);
      ownerpe = PEFORROW(tokenInfoa(nextToken));
      done = (tokenName(nextToken) != LFTOK_ROW_HEADER);

      while (!done && !pe_has_data[ownerpe]) {
        int row_length;

        readTokenExpecting(LFTOK_ROW_HEADER,file);
        row_length = readInteger(file);

        LFBufferRead( buffer, file, row_length);
        pe_has_data[ownerpe]=True;

        if (!PEISME(ownerpe)) {
          LFBufferPut( buffer, ownerpe);
          LFBufferClear( buffer );
        }

        nextToken = peekToken(file);
        ownerpe = PEFORROW(tokenInfoa(nextToken));
        done = (tokenName(nextToken) != LFTOK_ROW_HEADER);
      }
    }

    ipc_barrier();
    ipc_get( &done, IPC_INT, 1, PARENTPE);

#ifdef CRAY
#pragma _CRI suppress (buffer)
#endif
    if (LFBufferLength(buffer)!=0)
      binaryWeightsBufferProcess(buffer, False);
  }

  if (status != 0) return status;
  return LF_NO_ERROR;
}


/* Processing a weight token requires knowing which neuron it belongs to */
#define MAKE_SURE_NEURON_HEADER_INFO_AVAILABLE \
  if (current_map_row == Uninitialized || current_j == Uninitialized) { \
    ipc_notify(IPC_ALL,IPC_ERROR,"Missing neuron header"); \
    return LF_ERROR; \
  }

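/* Consume the token stream in the buffer and either store the values into
   the weight arrays (verify==False) or compare them against what is
   already in memory (verify==True, used to double-check a buffer just
   filled for saving).  Returns LF_ERROR on a malformed stream. */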
int binaryWeightsBufferProcess( LFBuffer *buffer, int verify)
{
  LFTokenType thisToken;
  int max_eye_data_found=-1;

#ifdef LF_DEBUG_FILE
  int offset;
#endif

  int current_local_row = Uninitialized;
  int current_map_row   = Uninitialized;
  int current_j         = Uninitialized;

  int bufferStart = buffer->start;
  int bufferEnd   = buffer->end;

  do
  {
    if (status != LF_NO_ERROR) return status;

#ifdef LF_DEBUG_FILE
    offset = buffer->start;
#endif
    thisToken = LFBufferReadToken(buffer);
#ifdef LF_DEBUG_FILE
    printToken( thisToken, offset );
#endif

    switch (tokenName(thisToken)) {

    case LFTOK_NEURON_HEADER_OLD:
    case LFTOK_NEURON_HEADER:

      if (current_map_row != Uninitialized || current_j != Uninitialized) {
        ipc_notify(IPC_ALL,IPC_ERROR,"Missing neuron trailer");
        return LF_ERROR;
      }

      current_map_row   = tokenInfoa(thisToken);
      current_j         = tokenInfob(thisToken);
      current_local_row = LOCALROW(current_map_row);

      compareOrSetInt(&cortex_map[current_map_row][current_j].centerx,
                      LFBufferReadInteger(buffer),
                      verify, "centerx");
      compareOrSetInt(&cortex_map[current_map_row][current_j].centery,
                      LFBufferReadInteger(buffer),
                      verify, "centery");

      max_eye_data_found=-1;

      if (tokenName(thisToken)==LFTOK_NEURON_HEADER) {
        int which_eye;
        for (which_eye = 0; which_eye < num_eyes; which_eye++) {
#ifdef NO_ANALYZE
          LFBufferReadInteger(buffer);
#else
          compareOrSetInt(&or_pref[which_eye][current_map_row][current_j],
                          LFBufferReadInteger(buffer),
                          verify, "orientation preference");
#endif
        }
      }

      break;

    case LFTOK_AFF_WEIGHTS:
      {
        int x,y,eye;
        const int rf_width    = tokenInfoa(thisToken);
        const int which_eye   = tokenInfob(thisToken);
        const int eye_start   = (which_eye==LF_ALL_EYES ? 0        : which_eye);
        const int eye_bound   = (which_eye==LF_ALL_EYES ? num_eyes : which_eye+1);
        const double wt_scale = 2.0/num_eyes;

        if (which_eye!=LF_ALL_EYES)
          max_eye_data_found = MAX(max_eye_data_found,which_eye);

        compareParameter((rf_width-1)/2, rf_radius, "rf_radius" );

        MAKE_SURE_NEURON_HEADER_INFO_AVAILABLE;

        for (x=0; x<rf_width; x++)
          for (y=0; y<rf_width; y++) {
            a_weight aFloat = wt_scale * LFBufferReadFloat(buffer);

            for (eye=eye_start; eye<eye_bound; eye++)
              compareOrSet_a_weight(&wts[current_local_row][current_j].weights[eye][x][y],
                                    aFloat, verify, "afferent weight");
          }
      }
      break;


    case LFTOK_LEXC_WEIGHTS:
      MAKE_SURE_NEURON_HEADER_INFO_AVAILABLE;
      binaryWeightsBufferReadLatWeights( buffer, thisToken,
                                         current_map_row, current_j,
                                         exc_rad, exc_array_width,
                                         wts[current_local_row][current_j].lat_exc_wts,
                                         verify,"lateral excitatory weight");
      break;

    case LFTOK_LEXC_WEIGHTS_OLD:
      {
        int start_index, end_index, i;
        static int ignoring_weights = False;

        start_index = tokenInfoa(thisToken);
        end_index   = tokenInfob(thisToken);

        MAKE_SURE_NEURON_HEADER_INFO_AVAILABLE;

        for (i=start_index; i<=end_index; i++) {
          l_weight aFloat = LFBufferReadFloat(buffer);

          if (!ignoring_weights && (end_index > lat_exc_dimension)) {
            if (!load_afferent_weights_only)
              ipc_notify(IPC_ALL,IPC_WARNING,"Not enough space for the lateral excitatory weights; ignoring them");
            ignoring_weights = True;
          }

          if (!ignoring_weights && !load_afferent_weights_only)
            compareOrSet_l_weight(&wts[current_local_row][current_j].lat_exc_wts[i],
                                  aFloat, verify, "lateral excitatory weight");
        }
      }
      break;


    case LFTOK_LINH_WEIGHT_CHUNK:
      {
        int i;
        int start_index, end_index;
        static int ignoring_weights = False;

        start_index = tokenInfoa(thisToken);
        end_index   = tokenInfob(thisToken);

        MAKE_SURE_NEURON_HEADER_INFO_AVAILABLE;

        for (i=start_index; i<=end_index; i++) {
          l_weight aFloat = LFBufferReadFloat(buffer);

          if (!ignoring_weights && (end_index > lat_inh_dimension)) {
            if (!load_afferent_weights_only)
              ipc_notify(IPC_ALL,IPC_WARNING,"Not enough space for the lateral inhibitory weights; ignoring them");
            ignoring_weights = True;
          }

          if (!ignoring_weights && !load_afferent_weights_only)
            compareOrSet_l_weight(&wts[current_local_row][current_j].lat_inh_wts[i],
                                  aFloat, verify, "lateral inhibitory weight");
        }
      }
      break;


    case LFTOK_NEURON_TRAILER:
      if ((tokenInfoa(thisToken) != current_map_row) ||
          (tokenInfob(thisToken) != current_j)) {
        status = LF_ERROR;
        ipc_notify(IPC_ALL,IPC_ERROR,"File format error -- neuron trailer");
      }

      if (max_eye_data_found >= 0 && num_eyes < max_eye_data_found+1) {
        static int warned=False;
        if (!warned) {
          ipc_notify(IPC_ONE,IPC_CAUTION,"Data in file was for %d eyes; ignoring eyes larger than %d",
                     max_eye_data_found+1,num_eyes-1);
          warned=True;
        }
      }

      if (max_eye_data_found >= 0 && num_eyes > max_eye_data_found+1) {
        int x,y,eye;
        static int warned=False;

        if (!warned) {
          ipc_notify(IPC_ONE,IPC_CAUTION,"Data in file was for %d eyes; extending to %d eyes",
                     max_eye_data_found+1,num_eyes);
          warned=True;
        }

        for (x=0; x<1+rf_radius*2; x++)
          for (y=0; y<1+rf_radius*2; y++)
            for (eye=max_eye_data_found+1; eye<num_eyes; eye++)
              wts[current_local_row][current_j].weights[eye][x][y] =
                wts[current_local_row][current_j].weights[max_eye_data_found][x][y];
      }

      current_map_row   = Uninitialized;
      current_local_row = Uninitialized;
      current_j         = Uninitialized;
      break;


    default:
      status = LF_ERROR;
      ipc_notify(IPC_ALL,IPC_ERROR,"Error in input file -- unknown token: %ld",
                 (long)tokenName(thisToken));
    }
  } while (LFBufferLength(buffer) > 0);


  LFBufferClear( buffer );

  /* When verifying, leave the buffer contents intact for the caller */
  if (verify) {
    buffer->start = bufferStart;
    buffer->end   = bufferEnd;
  }

  return LF_NO_ERROR;
}

void printTokens( FILE *file )
{
  int offset;
  LFTokenType token;
  f32 float32;
  i32 int32;
  int i;
  bigendian = endianness();

  if (AMPARENTPE)
    do {
      offset = ftell(file);

      token = readToken(file);

      printToken( token, offset );

      if ( (tokenName(token) >= SMALLEST_LFTOK) && (tokenName(token) <= LARGEST_LFTOK) )
        for (i=2; i<tokenLength(token); i++) {
          /* Show each data item both as an integer and reinterpreted as a float */
          offset = ftell(file);
          int32 = (i32)readInteger(file);
          fseek(file, offset, SEEK_SET);
          float32 = (f32)readFloat(file);

          ipc_notify(IPC_ALL,IPC_STD,"Offset: %#6x Hex: %10x Int: %11d Float: %e",
                     (int)offset,(int)int32,(int)int32,(double)float32);
        }
    }
    while (tokenName(token) != LFTOK_SNAPSHOT_TRAILER);
}

void printTokensInBuffer( LFBuffer *buffer )
{
  int offset;
  LFTokenType token;
  f32 float32;
  i32 int32;
  int i;
  int bufferStart = buffer->start;
  int bufferEnd   = buffer->end;

  if (AMPARENTPE)
    do {
      offset = buffer->start;

      token = LFBufferReadToken(buffer);

      printToken( token, offset );

      if ( (tokenName(token) >= SMALLEST_LFTOK) && (tokenName(token) <= LARGEST_LFTOK) )
        for (i=2; i<tokenLength(token); i++) {
          offset = buffer->start;
          int32 = (i32)LFBufferReadInteger(buffer);
          buffer->start = offset;
          float32 = (f32)LFBufferReadFloat(buffer);

          ipc_notify(IPC_ALL,IPC_STD,"Offset: %#6x Hex: %10x Int: %11d Float: %e",
                     (int)offset,(int)int32,(int)int32,(double)float32);
        }
    }
    while (LFBufferLength(buffer)>0);

  buffer->start = bufferStart;
  buffer->end   = bufferEnd;
}

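/* Pack a token name, token length (in 32-bit items, including the token
   itself), and two 16-bit info fields into the two words of an LFTokenType. */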
LFTokenType makeToken(int name, int length, int infoa, int infob )
{
  LFTokenType temp;
  temp.right =
    ( ((i32)infob  & 0xFFFF)       ) |
    ( ((i32)infoa  & 0xFFFF) << 16 );

  temp.left =
    ( ((i32)length & 0xFFFF)       ) |
    ( ((i32)name   & 0xFFFF) << 16 );

  return temp;
}


/* Accessors for the fields packed by makeToken */
int tokenName  ( LFTokenType token ) { return (int)((token.left  >> 16)         ); }
int tokenLength( LFTokenType token ) { return (int)((token.left        ) & 0xFFFF); }
int tokenInfoa ( LFTokenType token ) { return (int)((token.right >> 16 ) & 0xFFFF); }
int tokenInfob ( LFTokenType token ) { return (int)((token.right       ) & 0xFFFF); }


const char *tokenNameString(LFTokenType token)
{
  static char buf[20];

  switch (tokenName(token)) {
  case LFTOK_SNAPSHOT_HEADER:   return("SNAPSHOT_HEADER");
  case LFTOK_ROW_HEADER:        return("ROW_HEADER");
  case LFTOK_NEURON_HEADER_OLD: return("NEURON_HEADER_OLD");
  case LFTOK_NEURON_HEADER:     return("NEURON_HEADER");
  case LFTOK_AFF_WEIGHTS:       return("AFF_WEIGHTS");
  case LFTOK_LEXC_WEIGHTS_OLD:  return("LEXC_WEIGHTS_OLD");
  case LFTOK_LEXC_WEIGHTS:      return("LEXC_WEIGHTS");
  case LFTOK_LINH_WEIGHT_CHUNK: return("LINH_WEIGHT_CHUNK");
  case LFTOK_NEURON_TRAILER:    return("NEURON_TRAILER");
  case LFTOK_SNAPSHOT_TRAILER:  return("SNAPSHOT_TRAILER");
  default:
    sprintf(buf,"Token: %4d", (int)tokenName(token));
    return buf;
  }
}


void printToken( LFTokenType token, int offset )
{
  ipc_notify(IPC_ALL,IPC_STD,"Offset: %#6x %-18s Length: %3d Infoa: %3d Infob: %3d",
             offset,tokenNameString(token),(int)tokenLength(token),
             (int)tokenInfoa(token),(int)tokenInfob(token));
}

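/* Fetch (LFBufferGet) or send (LFBufferPut) a buffer's bookkeeping fields
   and its used data region to/from another PE via the ipc routines. */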
void LFBufferGet( LFBuffer *buffer, int pe )
{
  ipc_get64(&(buffer->start), 1, pe);
  ipc_get64(&(buffer->end), 1, pe);
  ipc_get32(&(buffer->data[0]), buffer->end + 1, pe);
}


void LFBufferPut( LFBuffer *buffer, int pe )
{
  ipc_put64(&(buffer->start), 1, pe);
  ipc_put64(&(buffer->end), 1, pe);
  ipc_put32(&(buffer->data[0]), buffer->end + 1, pe);
}


void LFBufferClear( LFBuffer *buffer )
{
  buffer->start = 0;
  buffer->end   = -1;
}


int LFBufferLength( LFBuffer *buffer )
{ return (buffer->end - buffer->start + 1); }

int LFBufferWrite( LFBuffer *buffer, FILE *file )
{
  buffer->start += fwrite(&(buffer->data[buffer->start]), sizeof(buffer->data[0]),
                          LFBufferLength(buffer), file);

  if ((buffer->end - buffer->start + 1) != 0) {
    ipc_notify(IPC_ALL,IPC_ERROR,"Cannot write buffer to file -- Disk or quota full?");
    return LF_ERROR;
  }

  return LF_NO_ERROR;
}


int LFBufferRead( LFBuffer *buffer, FILE *file, int length)
{
  if (LFBufferLength(buffer) + length >= LF_BUFFER_SIZE) {
    ipc_notify(IPC_ALL,IPC_ERROR,"Binary saving buffer is not large enough (%d >= %d); aborting read",
               buffer->end + length, LF_BUFFER_SIZE);
    return LF_ERROR;
  }

  buffer->end += fread( &buffer->data[buffer->end + 1],
                        sizeof(buffer->data[0]), length, file);

  return LF_NO_ERROR;
}

int LFBufferWriteToken(LFBuffer *buffer, LFTokenType token )
{
  i32 words[2];
  words[0] = token.left;
  words[1] = token.right;

  if (!bigendian) {
    words[0] = i32_swap_endian(words[0]);
    words[1] = i32_swap_endian(words[1]);
  }

  if ((buffer->end+3) > LF_BUFFER_SIZE) {
    ipc_notify(IPC_ALL,IPC_ERROR,"Buffer is full -- cannot write token");
    return LF_ERROR;
  }

  buffer->data[++buffer->end] = words[0];
  buffer->data[++buffer->end] = words[1];

  tokensWritten++;

  return LF_NO_ERROR;
}


LFTokenType LFBufferReadToken(LFBuffer *buffer )
{
  LFTokenType token;
  i32 words[2];

  if ((buffer->end - buffer->start) < 1)
    ipc_notify(IPC_ALL,IPC_ERROR,"Buffer is empty -- cannot read token");

  words[0] = buffer->data[buffer->start++];
  words[1] = buffer->data[buffer->start++];
  if (!bigendian) {
    words[0] = i32_swap_endian(words[0]);
    words[1] = i32_swap_endian(words[1]);
  }

  token.left  = words[0];
  token.right = words[1];

  tokensRead++;

  if ( (tokenName(token) < SMALLEST_LFTOK) || (tokenName(token) > LARGEST_LFTOK) )
    ipc_notify(IPC_ALL,IPC_WARNING,"Unknown token: %08x %08x", (int)token.left, (int)token.right);

  return token;
}

int LFBufferWriteItem(LFBuffer *buffer, void *itemptr)
{
  LFBufferDataType item = *(LFBufferDataType *)itemptr;
  if (!bigendian)
    item = LFBufferDataType_swap_endian(item);

  /* The next item goes at index end+1, which must stay below LF_BUFFER_SIZE */
  if ((buffer->end+2) > LF_BUFFER_SIZE) {
    ipc_notify(IPC_ALL,IPC_ERROR,"Buffer is full -- cannot write item");
    return LF_ERROR;
  }

  buffer->data[++buffer->end] = item;
  return LF_NO_ERROR;
}


LFBufferDataType LFBufferReadItem(LFBuffer *buffer)
{
  LFBufferDataType item;

  if ((buffer->end - buffer->start) < 0) {
    ipc_notify(IPC_ALL,IPC_ERROR,"Buffer is empty -- cannot read item");
    return 0;
  }

  item = buffer->data[buffer->start++];
  if (!bigendian)
    item = LFBufferDataType_swap_endian(item);

  return item;
}

int LFBufferWriteFloat(LFBuffer *buffer, f32 aFloat )
{
  assert(sizeof(f32)==sizeof(LFBufferDataType));
  return LFBufferWriteItem(buffer,&aFloat);
}


f32 LFBufferReadFloat(LFBuffer *buffer)
{
  LFBufferDataType item = LFBufferReadItem(buffer);
  assert(sizeof(f32)==sizeof(LFBufferDataType));
  return *(f32 *)(void *)(&item);
}


int LFBufferWriteInteger(LFBuffer *buffer, int anInteger )
{
  i32 output=(i32)anInteger;
  assert(sizeof(i32)==sizeof(LFBufferDataType));
  return LFBufferWriteItem(buffer,&output);
}


int LFBufferReadInteger(LFBuffer *buffer)
{ return (int) LFBufferReadItem(buffer); }

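/* The routines below mirror the LFBuffer* ones but operate directly on a
   FILE*, byte-swapping on little-endian hosts so that the on-disk format
   is always big-endian. */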
int writeToken(FILE *file, LFTokenType token )
{
  i32 words[2];
  words[0] = token.left;
  words[1] = token.right;

  if (!bigendian) {
    words[0] = i32_swap_endian(words[0]);
    words[1] = i32_swap_endian(words[1]);
  }

  if (2 != fwrite( &words, sizeof(words[0]), 2, file)) {
    ipc_notify(IPC_ALL,IPC_ERROR,"Cannot write to file");
    return LF_ERROR;
  }
  tokensWritten++;

  return LF_NO_ERROR;
}


LFTokenType readToken(FILE *file )
{
  LFTokenType token;
  i32 words[2];

  if (2 != fread( &words, sizeof(words[0]), 2, file))
    ipc_notify(IPC_ALL,IPC_ERROR,"Cannot read token from file");

  tokensRead++;

  if (!bigendian) {
    words[0] = i32_swap_endian(words[0]);
    words[1] = i32_swap_endian(words[1]);
  }

  token.left  = words[0];
  token.right = words[1];

  if ( (tokenName(token) < SMALLEST_LFTOK) || (tokenName(token) > LARGEST_LFTOK) )
    ipc_notify(IPC_ALL,IPC_WARNING,"Unknown token: %08x %08x", (int)token.left, (int)token.right);

  return token;
}


LFTokenType peekToken(FILE *file )
{
  LFTokenType token;
  i32 words[2];
  long position = ftell(file);

  if (2 != fread( &words, sizeof(words[0]), 2, file))
    ipc_notify(IPC_ALL,IPC_ERROR,"Cannot read token from file");

  fseek(file, position, SEEK_SET);

  if (!bigendian) {
    words[0] = i32_swap_endian(words[0]);
    words[1] = i32_swap_endian(words[1]);
  }

  token.left  = words[0];
  token.right = words[1];

  if ( (tokenName(token) < SMALLEST_LFTOK) || (tokenName(token) > LARGEST_LFTOK) )
    ipc_notify(IPC_ALL,IPC_WARNING,"Unknown token: %08x %08x", (int)token.left, (int)token.right);

  return token;
}


LFTokenType readTokenExpecting(int tokenNameExpected, FILE *file)
{
  int offset = ftell(file);
  LFTokenType token;
  token = readToken(file);

#ifdef LF_DEBUG_FILE
  printToken( token, offset );
#endif

  if (tokenName(token) != tokenNameExpected)
    ipc_notify(IPC_ALL,IPC_ERROR,"Expected token %s at offset %d but found %d. (Full token: 0x%08x %08x)",
               tokenNameString(makeToken(tokenNameExpected,0,0,0)), offset, (int)tokenName(token),
               (int)token.left, (int)token.right);

  return token;
}

int writeItem(FILE *file, void *itemptr)
{
  static int warningprinted = False;
  LFBufferDataType item = *(LFBufferDataType *)itemptr;

  if (!bigendian)
    item = LFBufferDataType_swap_endian(item);

  if (1 != fwrite( &item, sizeof(item), 1, file))
    if (!warningprinted) {
      ipc_notify(IPC_ALL,IPC_ERROR,"Cannot write to file");
      warningprinted=True;
    }

  return LF_NO_ERROR;
}


LFBufferDataType readItem(FILE *file)
{
  LFBufferDataType item;
  static int warningprinted = False;

  if (1 != fread( &item, sizeof(item), 1, file))
    if (!warningprinted) {
      ipc_notify(IPC_ALL,IPC_ERROR,"Cannot read from file");
      warningprinted=True;
    }

  if (!bigendian)
    item = LFBufferDataType_swap_endian(item);

  return item;
}


int writeFloat(FILE *file, f32 aFloat )
{
  assert(sizeof(f32)==sizeof(LFBufferDataType));
  return writeItem(file,&aFloat);
}


f32 readFloat(FILE *file)
{
  LFBufferDataType item = readItem(file);
  assert(sizeof(f32)==sizeof(LFBufferDataType));
  return *(f32 *)(void *)(&item);
}


int writeInteger(FILE *file, int anInteger )
{
  i32 output=(i32)anInteger;
  assert(sizeof(i32)==sizeof(LFBufferDataType));
  return writeItem(file,&output);
}


int readInteger(FILE *file)
{ return (int)readItem(file); }



/* Check that a parameter stored in the weight file matches the value
   currently in effect; report an error if it does not. */
int compareParameter(int weight_file_parameter, int param_file_parameter, const char *description)
{
  if (weight_file_parameter != param_file_parameter) {
    ipc_notify(IPC_ALL,IPC_ERROR,"Value for %s in weight file (%d) differs from value in param file (%d)",
               description, (int)weight_file_parameter, (int)param_file_parameter);
    return LF_ERROR;
  }

  return LF_NO_ERROR;
}


/* Either store the given value (verify==False) or warn if it differs from
   what is already in memory (verify==True). */
int compareOrSetInt(int *destination, int value, int verify, const char *description)
{
  if (!verify)
    *destination = value;
  else if (*destination != value) {
    static int warnings=0;
    warnings++;
    if (warnings < LF_MAXWARNINGS)
      ipc_notify(IPC_ALL,IPC_ERROR,"compareOrSetInt: Value for %s stored in buffer (%d) differs from value in memory (%d)",
                 description, (int)value, (int)*destination);
    else if (warnings == LF_MAXWARNINGS)
      ipc_notify(IPC_ALL,IPC_ERROR,"compareOrSetInt: Maximum number of warnings reached (%d)",warnings);
  }

  return *destination;
}

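/* Generate compareOrSet_<type> for the floating-point weight types;
   differences are reported only when they exceed LF_FLOAT_TOLERANCE
   (relative to the stored value when it is nonzero). */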
#define compareOrSet_proc(type) \
type compareOrSet_ ## type (type *destination, type value, int verify, const char *description) \
{ \
  if (!verify) \
    *destination = value; \
  else \
    if ((*destination == 0 && ((*destination - value)                 > LF_FLOAT_TOLERANCE )) || \
        (*destination != 0 && ((*destination - value)/(*destination)) > LF_FLOAT_TOLERANCE ) ) { \
      static int warnings=0; \
      warnings++; \
      if (warnings < LF_MAXWARNINGS) \
        ipc_notify(IPC_ALL,IPC_ERROR,"compareOrSet_" #type ": Value for %s stored in buffer (%e) differs from value in memory (%e)", \
                   description, (double)value, (double)(*destination)); \
      else if (warnings == LF_MAXWARNINGS) \
        ipc_notify(IPC_ALL,IPC_ERROR,"compareOrSet_" #type ": Maximum number of warnings reached (%d)",warnings); \
    } \
 \
  return *destination; \
}


compareOrSet_proc(a_weight)
compareOrSet_proc(l_weight)