Soft Computing
CLASS WORK
SESSIONAL WORK
ASSIGNMENT NO.
EXPERIMENT NO.
Signature of Student
Signature of Professor
free(n);   /* release the current node */
n = temp;  /* advance to the saved successor */
}
}
ASSIGNMENT NO.
EXPERIMENT NO.
Signature of Student
Signature of Professor
prev = NULL;
cur = root;
// Walk down the tree to the leaf position where the new value belongs.
while(cur != NULL) {
prev = cur;
if(value < cur->value) {
cur = cur->left;
} else {
cur = cur->right;
}
}
// Attach the new node (temp, allocated earlier) on the correct side of its parent.
if(value < prev->value) {
prev->left = temp;
} else {
prev->right = temp;
}
}
// Level order traversal using a fixed-size array as a queue.
void levelOrderTraversal(mynode *root) {
mynode *queue[100] = {(mynode *)0}; // Important to initialize: the NULL slots terminate the loop.
int size = 0;           // Next free slot in the queue.
int queue_pointer = 0;  // Next node to dequeue.
while(root) {
printf("[%d] ", root->value);
if(root->left) {
queue[size++] = root->left;
}
if(root->right) {
queue[size++] = root->right;
}
// Once the queue is exhausted this reads an initialized NULL slot, ending the loop.
root = queue[queue_pointer++];
}
}
ASSIGNMENT NO.
EXPERIMENT NO.
Signature of Student
Signature of Professor
// Recursive backtracking: try to place a queen in row r, column c.
void check(int r, int c, char board[100][100])
{
int i, j;
// Terminating condition for the recursion...
if ( ( r == no_of_queens ) && ( c == 0 ))
{
clrscr();
printf(" (%d-Queen) Set : %d
", no_of_queens, count++);
print( board );
fflush(stdin);
ch = (char)getch();
clrscr();
if(ch == 'e')
exit (0);
printf("M/c in work ! Please Wait...");
}
// Vertical check...
for(i = 0; i < r; i++)
{
if ( board[i][c] == queen)
return;
}
// Horizontal check...
for(j = 0; j < c; j++)
{
if ( board[r][j] == queen)
return;
}
// Left-Diagonal check...
i = r; j = c;
do
{
if ( board[i][j] == queen )
return;
i--; j--;
}
while( i >= 0 && j >= 0 );
// Right-Diagonal check...
i = r; j = c;
do
{
if ( board[i][j] == queen )
return;
i--; j++;
}
while( i >= 0 && j < no_of_queens );
// Place the queen if the checked position is safe...
board[r][c] = queen;
r++;
// Try every column of the next row, from the first up to the last...
for(int p = 0; p < no_of_queens; p++)
check(r, p, board);
for(int h = 0; h < no_of_queens; h++)
board[r - 1][h] = '-';
}
void print(char board[100][100])
{
for(int i = 0; i < no_of_queens; i++)
{
if(flagrow == 1)
printf("%3d", i + 1);
for(int j = 0; j < no_of_queens; j++)
{
if(board[i][j] == queen)
{
textcolor(RED);
cprintf("%3c", queen);
}
else
{
textcolor(8); //dark gray
cprintf("%3c", 22);
}
}
printf("
");
}
textcolor(7);
if(flagcol == 1)
{ if(flagrow)
printf(" ");
for(int i = 0; i < no_of_queens; i++)
printf("%3d", i + 1);
}
gotoxy(62, 1);
printf("Press E to exit.");
textcolor(7);
}
ASSIGNMENT NO.
EXPERIMENT NO.
Signature of Student
Signature of Professor
For example, a matrix X can be concatenated side by side with a matrix Y to
form a new matrix Z.
A straightforward implementation constructs a new matrix whose entries are
given by
if j < X_width
Z[i][j] = X[i][j]
else
Z[i][j] = Y[i, j X_width]
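As a concrete illustration, here is a minimal C sketch of this horizontal
concatenation, assuming row-major matrices of doubles with explicit dimensions
(the function name and parameter names are illustrative, not from the original):

/* Concatenate X (rows x xw) and Y (rows x yw) side by side into Z,
   which must have room for rows x (xw + yw) entries. */
void concat_horizontal(const double *X, const double *Y, double *Z,
int rows, int xw, int yw)
{
int zw = xw + yw;
int i, j;
for (i = 0; i < rows; i++) {
for (j = 0; j < zw; j++) {
Z[i * zw + j] = (j < xw) ? X[i * xw + j] : Y[i * yw + (j - xw)];
}
}
}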
If no graph libraries are available, simply write a function that outputs a
tab-separated list of the input and output sequences to plot. You can then load
or paste this into your favourite spreadsheet program to make the necessary plots.
2. Implement Output and Class conversion functions
This is very simple: implement a function that converts an output matrix to a
class number vector, and another that converts a class number vector back to an
output matrix. A sketch of both conversions follows the example below.
For example, the output_to_class function will take the following matrix
1 0 0
0 1 0
0 0 1
1 0 0
0 0 1
and convert it to:
1
2
3
1
3
(The second function converts this class vector back into the original output matrix.)
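As a sketch, both conversions might look like this in C, assuming one-hot rows
stored row-major and classes numbered from 1 (the function and parameter names
are illustrative, not from the original):

/* Return, for each row, the 1-based index of its largest entry. */
void output_to_class(const double *out, int *cls, int rows, int cols)
{
int i, j;
for (i = 0; i < rows; i++) {
int best = 0;
for (j = 1; j < cols; j++)
if (out[i * cols + j] > out[i * cols + best]) best = j;
cls[i] = best + 1;
}
}
/* Rebuild the one-hot output matrix from a class number vector. */
void class_to_output(const int *cls, double *out, int rows, int cols)
{
int i, j;
for (i = 0; i < rows; i++)
for (j = 0; j < cols; j++)
out[i * cols + j] = (j == cls[i] - 1) ? 1.0 : 0.0;
}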
3. Implement a function to read in data files
For this tutorial you can use the following three files:
iris_training.dat
iris_validation.dat
iris_test.dat
These three files contain samples from the UCI iris dataset, a simple and quite
famous dataset. In each file, samples are contained in rows. Each row has seven
entries, separated by tabs. The first four entries are features of irises (sepal
length, sepal width, petal length, and petal width); the last three are outputs
denoting the species of iris (setosa, versicolor, and virginica). I have
preprocessed the values a bit to get them in the appropriate ranges.
You must read in the data so that you can treat the inputs of each set as a single
matrix; similarly for the outputs.
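A minimal C sketch of such a reader, assuming exactly seven tab-separated
numbers per row (four inputs, then three outputs) and an illustrative row
limit; the function name is not from the original:

#include <stdio.h>
/* Read up to max_rows samples from path; returns the number read, or -1 on error. */
int read_data(const char *path, double inputs[][4], double outputs[][3],
int max_rows)
{
FILE *fp = fopen(path, "r");
int n = 0;
if (fp == NULL) return -1;
while (n < max_rows &&
fscanf(fp, "%lf %lf %lf %lf %lf %lf %lf",
&inputs[n][0], &inputs[n][1], &inputs[n][2], &inputs[n][3],
&outputs[n][0], &outputs[n][1], &outputs[n][2]) == 7) {
n++;
}
fclose(fp);
return n;
}

Called, for example, as read_data("iris_training.dat", train_in, train_out, 200),
this leaves the inputs of the set in one matrix and the outputs in another.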
ASSIGNMENT NO.
EXPERIMENT NO.
Signature of Student
Signature of Professor
/* (Earlier definitions of NUM_DATA, X, Y, N, M, HI, LO, and the Pattern
array are elided in this listing.) */
INT Input[NUM_DATA][N];
INT Output[NUM_DATA][M] =
{ {HI, LO, LO, LO, LO, LO, LO, LO, LO, LO},
{LO, HI, LO, LO, LO, LO, LO, LO, LO, LO},
{LO, LO, HI, LO, LO, LO, LO, LO, LO, LO},
{LO, LO, LO, HI, LO, LO, LO, LO, LO, LO},
{LO, LO, LO, LO, HI, LO, LO, LO, LO, LO},
{LO, LO, LO, LO, LO, HI, LO, LO, LO, LO},
{LO, LO, LO, LO, LO, LO, HI, LO, LO, LO},
{LO, LO, LO, LO, LO, LO, LO, HI, LO, LO},
{LO, LO, LO, LO, LO, LO, LO, LO, HI, LO},
{LO, LO, LO, LO, LO, LO, LO, LO, LO, HI} };
FILE* f;
void InitializeApplication(NET* Net)
{
INT n,i,j;
Net->Eta = 0.001;
Net->Epsilon = 0.0001;
for (n=0; n<NUM_DATA; n++) {
for (i=0; i<Y; i++) {
for (j=0; j<X; j++) {
Input[n][i*X+j] = (Pattern[n][i][j] == 'O') ? HI : LO;
}
}
}
f = fopen("ADALINE.txt", "w");
}
void WriteInput(NET* Net, INT* Input)
{
INT i;
for (i=0; i<N; i++) {
if (i%X == 0) {
fprintf(f, "\n");
}
fprintf(f, "%c", (Input[i] == HI) ? 'O' : ' ');
}
fprintf(f, " -> ");
}
void WriteOutput(NET* Net, INT* Output)
{
INT i;
INT Count, Index;
Count = 0;
for (i=0; i<M; i++) {
if (Output[i] == HI) {
Count++;
Index = i;
}
}
if (Count == 1)
fprintf(f, "%i\n", Index);
else
fprintf(f, "%s\n", "invalid");
}
void FinalizeApplication(NET* Net)
{
fclose(f);
}
/******************************************************************************
INITIALIZATION
******************************************************************************/
void GenerateNetwork(NET* Net)
{
INT i;
Net->InputLayer = (LAYER*) malloc(sizeof(LAYER));
Net->OutputLayer = (LAYER*) malloc(sizeof(LAYER));
Net->InputLayer->Units = N;
Net->InputLayer->Output = (INT*) calloc(N+1, sizeof(INT));
Net->InputLayer->Output[0] = BIAS;
Net->OutputLayer->Units = M;
Net->OutputLayer->Activation = (REAL*) calloc(M+1, sizeof(REAL));
Net->OutputLayer->Output = (INT*) calloc(M+1, sizeof(INT));
Net->OutputLayer->Error = (REAL*) calloc(M+1, sizeof(REAL));
Net->OutputLayer->Weight = (REAL**) calloc(M+1, sizeof(REAL*));
for (i=1; i<=M; i++) {
Net->OutputLayer->Weight[i] = (REAL*) calloc(N+1, sizeof(REAL));
}
Net->Eta = 0.1;
Net->Epsilon = 0.01;
}
void RandomWeights(NET* Net)
{
INT i,j;
for (i=1; i<=Net->OutputLayer->Units; i++) {
for (j=0; j<=Net->InputLayer->Units; j++) {
Net->OutputLayer->Weight[i][j] = RandomEqualREAL(-0.5, 0.5);
}
}
}
void SetInput(NET* Net, INT* Input, BOOL Protocoling)
{
INT i;
for (i=1; i<=Net->InputLayer->Units; i++) {
Net->InputLayer->Output[i] = Input[i-1];
}
if (Protocoling) {
WriteInput(Net, Input);
}
}
void GetOutput(NET* Net, INT* Output, BOOL Protocoling)
{
INT i;
for (i=1; i<=Net->OutputLayer->Units; i++) {
Output[i-1] = Net->OutputLayer->Output[i];
}
if (Protocoling) {
WriteOutput(Net, Output);
}
}
/* PropagateNet() and ComputeOutputError(), which are called from
SimulateNet() below, are missing from this listing. */
void AdjustWeights(NET* Net)
{
INT i,j;
INT Out;
REAL Err;
for (i=1; i<=Net->OutputLayer->Units; i++) {
for (j=0; j<=Net->InputLayer->Units; j++) {
Out = Net->InputLayer->Output[j];
Err = Net->OutputLayer->Error[i];
Net->OutputLayer->Weight[i][j] += Net->Eta * Err * Out;
}
}
}
/******************************************************************************
SIMULATING THE NET
******************************************************************************/
void SimulateNet(NET* Net, INT* Input, INT* Target, BOOL Training, BOOL Protocoling)
{
INT Output[M];
SetInput(Net, Input, Protocoling);
PropagateNet(Net);
GetOutput(Net, Output, Protocoling);
ComputeOutputError(Net, Target);
if (Training)
AdjustWeights(Net);
}
/******************************************************************************
MAIN
******************************************************************************/
void main()
{
NET Net;
REAL Error;
BOOL Stop;
INT n,m;
InitializeRandoms();
GenerateNetwork(&Net);
RandomWeights(&Net);
InitializeApplication(&Net);
do {
Error = 0;
Stop = TRUE;
for (n=0; n<NUM_DATA; n++) {
SimulateNet(&Net, Input[n], Output[n], FALSE, FALSE);
Error = MAX(Error, Net.Error);
Stop = Stop AND (Net.Error < Net.Epsilon);
}
Error = MAX(Error, Net.Epsilon);
printf("Training %0.0f%% completed ...\n", (Net.Epsilon / Error) * 100);
if (NOT Stop) {
for (m=0; m<10*NUM_DATA; m++) {
n = RandomEqualINT(0, NUM_DATA-1);
SimulateNet(&Net, Input[n], Output[n], TRUE, FALSE);
}
}
} while (NOT Stop);
for (n=0; n<NUM_DATA; n++) {
SimulateNet(&Net, Input[n], Output[n], FALSE, TRUE);
}
FinalizeApplication(&Net);
}
Simulator Output for the Pattern Recognition Application
[Ten 5x7 dot-matrix digit patterns, each followed by the class the network
recognizes: 0 through 9 in order. The fixed-width alignment of the patterns
was lost in extraction.]
ASSIGNMENT NO.
EXPERIMENT NO.
Signature of Student
Signature of Professor
void SetInput(NET* Net, REAL* Input)
{
INT i;
for (i=1; i<=Net->InputLayer->Units; i++) {
Net->InputLayer->Output[i] = Input[i-1];
}
}
void GetOutput(NET* Net, REAL* Output)
{
INT i;
for (i=1; i<=Net->OutputLayer->Units; i++) {
Output[i-1] = Net->OutputLayer->Output[i];
}
}
/******************************************************************************
SUPPORT FOR STOPPED TRAINING
******************************************************************************/
void SaveWeights(NET* Net)
{
INT l,i,j;
for (l=1; l<NUM_LAYERS; l++) {
for (i=1; i<=Net->Layer[l]->Units; i++) {
for (j=0; j<=Net->Layer[l-1]->Units; j++) {
Net->Layer[l]->WeightSave[i][j] = Net->Layer[l]->Weight[i][j];
}
}
}
}
void RestoreWeights(NET* Net)
{
INT l,i,j;
for (l=1; l<NUM_LAYERS; l++) {
for (i=1; i<=Net->Layer[l]->Units; i++) {
for (j=0; j<=Net->Layer[l-1]->Units; j++) {
Net->Layer[l]->Weight[i][j] = Net->Layer[l]->WeightSave[i][j];
}
}
}
}
/******************************************************************************
PROPAGATING SIGNALS
******************************************************************************/
void PropagateLayer(NET* Net, LAYER* Lower, LAYER* Upper)
{
INT i,j;
REAL Sum;
for (i=1; i<=Upper->Units; i++) {
Sum = 0;
for (j=0; j<=Lower->Units; j++) {
Sum += Upper->Weight[i][j] * Lower->Output[j];
}
Upper->Output[i] = 1 / (1 + exp(-Net->Gain * Sum));
}
}
/* PropagateNet(), ComputeOutputError() and BackpropagateNet(), which are
called from SimulateNet() below, are missing from this listing. */
void AdjustWeights(NET* Net)
{
INT l,i,j;
REAL Out, Err, dWeight;
for (l=1; l<NUM_LAYERS; l++) {
for (i=1; i<=Net->Layer[l]->Units; i++) {
for (j=0; j<=Net->Layer[l-1]->Units; j++) {
Out = Net->Layer[l-1]->Output[j];
Err = Net->Layer[l]->Error[i];
dWeight = Net->Layer[l]->dWeight[i][j];
Net->Layer[l]->Weight[i][j] += Net->Eta * Err * Out + Net->Alpha * dWeight;
Net->Layer[l]->dWeight[i][j] = Net->Eta * Err * Out;
}
}
}
}
/******************************************************************************
SIMULATING THE NET
******************************************************************************/
void SimulateNet(NET* Net, REAL* Input, REAL* Output, REAL* Target, BOOL Training)
{
SetInput(Net, Input);
PropagateNet(Net);
GetOutput(Net, Output);
ComputeOutputError(Net, Target);
if (Training) {
BackpropagateNet(Net);
AdjustWeights(Net);
}
}
void TrainNet(NET* Net, INT Epochs)
{
INT Year, n;
REAL Output[M];
for (n=0; n<Epochs*TRAIN_YEARS; n++) {
Year = RandomEqualINT(TRAIN_LWB, TRAIN_UPB);
SimulateNet(Net, &(Sunspots[Year-N]), Output, &(Sunspots[Year]), TRUE);
}
}
void TestNet(NET* Net)
{
INT Year;
REAL Output[M];
TrainError = 0;
for (Year=TRAIN_LWB; Year<=TRAIN_UPB; Year++) {
SimulateNet(Net, &(Sunspots[Year-N]), Output, &(Sunspots[Year]), FALSE);
TrainError += Net->Error;
}
TestError = 0;
for (Year=TEST_LWB; Year<=TEST_UPB; Year++) {
SimulateNet(Net, &(Sunspots[Year-N]), Output, &(Sunspots[Year]), FALSE);
TestError += Net->Error;
}
fprintf(f, "\nNMSE is %0.3f on Training Set and %0.3f on Test Set",
TrainError / TrainErrorPredictingMean,
TestError / TestErrorPredictingMean);
}
void EvaluateNet(NET* Net)
{
INT Year;
REAL Output[M];
REAL Output_[M];
fprintf(f, "\n\n\n");
fprintf(f, "Year Sunspots Open-Loop Prediction Closed-Loop
Prediction\n");
fprintf(f, "\n");
for (Year=EVAL_LWB; Year<=EVAL_UPB; Year++) {
SimulateNet(Net, &(Sunspots [Year-N]), Output, &(Sunspots [Year]), FALSE);
SimulateNet(Net, &(Sunspots_[Year-N]), Output_, &(Sunspots_[Year]), FALSE);
Sunspots_[Year] = Output_[0];
fprintf(f, "%d %0.3f %0.3f
%0.3f\n",
FIRST_YEAR + Year,
Sunspots[Year],
Output [0],
Output_[0]);
}
}
/******************************************************************************
MAIN
******************************************************************************/
void main()
{
NET Net;
BOOL Stop;
REAL MinTestError;
InitializeRandoms();
GenerateNetwork(&Net);
RandomWeights(&Net);
InitializeApplication(&Net);
Stop = FALSE;
MinTestError = MAX_REAL;
do {
TrainNet(&Net, 10);
TestNet(&Net);
if (TestError < MinTestError) {
fprintf(f, " - saving Weights ...");
MinTestError = TestError;
SaveWeights(&Net);
}
else if (TestError > 1.2 * MinTestError) {
fprintf(f, " - stopping Training and restoring Weights ...");
Stop = TRUE;
RestoreWeights(&Net);
}
} while (NOT Stop);
TestNet(&Net);
EvaluateNet(&Net);
FinalizeApplication(&Net);
}
Simulator Output for the Time-Series Forecasting Application
NMSE is 0.879 on Training Set and 0.834 on Test Set - saving Weights ...
NMSE is 0.818 on Training Set and 0.783 on Test Set - saving Weights ...
NMSE is 0.749 on Training Set and 0.693 on Test Set - saving Weights ...
NMSE is 0.691 on Training Set and 0.614 on Test Set - saving Weights ...
NMSE is 0.622 on Training Set and 0.555 on Test Set - saving Weights ...
NMSE is 0.569 on Training Set and 0.491 on Test Set - saving Weights ...
NMSE is 0.533 on Training Set and 0.467 on Test Set - saving Weights ...
NMSE is 0.490 on Training Set and 0.416 on Test Set - saving Weights ...
NMSE is 0.470 on Training Set and 0.401 on Test Set - saving Weights ...
NMSE is 0.441 on Training Set and 0.361 on Test Set - saving Weights ...
.
.
.
NMSE is 0.142 on Training Set and 0.143 on Test Set
NMSE is 0.142 on Training Set and 0.146 on Test Set
NMSE is 0.141 on Training Set and 0.143 on Test Set
NMSE is 0.146 on Training Set and 0.141 on Test Set
NMSE is 0.144 on Training Set and 0.141 on Test Set
NMSE is 0.140 on Training Set and 0.142 on Test Set
NMSE is 0.144 on Training Set and 0.148 on Test Set
NMSE is 0.140 on Training Set and 0.139 on Test Set - saving Weights ...
NMSE is 0.140 on Training Set and 0.140 on Test Set
NMSE is 0.141 on Training Set and 0.138 on Test Set - saving Weights ...
.
.
.
NMSE is 0.104 on Training Set and 0.154 on Test Set
NMSE is 0.102 on Training Set and 0.160 on Test Set
NMSE is 0.102 on Training Set and 0.160 on Test Set
NMSE is 0.100 on Training Set and 0.157 on Test Set
NMSE is 0.105 on Training Set and 0.153 on Test Set
NMSE is 0.100 on Training Set and 0.155 on Test Set
NMSE is 0.101 on Training Set and 0.154 on Test Set
NMSE is 0.100 on Training Set and 0.158 on Test Set
NMSE is 0.107 on Training Set and 0.170 on Test Set - stopping Training and restoring Weights ...
NMSE is 0.141 on Training Set and 0.138 on Test Set
ASSIGNMENT NO.
EXPERIMENT NO.
Signature of Student
Signature of Professor
/******************************************************************************
RANDOMS DRAWN FROM DISTRIBUTIONS
******************************************************************************/
void InitializeRandoms()
{
srand(4711);
}
INT RandomEqualINT(INT Low, INT High)
{
return rand() % (High-Low+1) + Low;
}
/******************************************************************************
APPLICATION-SPECIFIC CODE
******************************************************************************/
#define NUM_DATA 5
#define X 10
#define Y 10
#define N (X * Y)
CHAR Pattern[NUM_DATA][Y][X] = { { "O O O O O ",
" O O O O O",
"O O O O O ",
" O O O O O",
"O O O O O ",
" O O O O O",
"O O O O O ",
" O O O O O",
"O O O O O ",
" O O O O O" },
{ "OO OO OO",
"OO OO OO",
" OO OO ",
" OO OO ",
"OO OO OO",
"OO OO OO",
" OO OO ",
" OO OO ",
"OO OO OO",
"OO OO OO" },
{ "OOOOO ",
"OOOOO ",
"OOOOO ",
"OOOOO ",
"OOOOO ",
" OOOOO",
" OOOOO",
" OOOOO",
" OOOOO",
" OOOOO" },
{ "O O O O",
" O O O ",
" O O O ",
"O O O O",
" O O O ",
" O O O ",
"O O O O",
" O O O ",
" O O O ",
"O O O O" },
{ "OOOOOOOOOO",
"O O",
"O OOOOOO O",
"O O O O",
"O O OO O O",
"O O OO O O",
"O O O O",
"O OOOOOO O",
"O O",
"OOOOOOOOOO" } };
CHAR Pattern_[NUM_DATA][Y][X] = { { " ",
" ",
" ",
" ",
" ",
" O O O O O",
"O O O O O ",
" O O O O O",
"O O O O O ",
" O O O O O" },
{ "OOO O O",
" O OOO OO",
" O O OO O",
" OOO O ",
"OO O OOO",
" O OOO O",
"O OO O O",
" O OOO ",
"OO OOO O ",
" O O OOO" },
{ "OOOOO ",
"O O OOO ",
"O O OOO ",
"O O OOO ",
"OOOOO ",
" OOOOO",
" OOO O O",
" OOO O O",
" OOO O O",
" OOOOO" },
/* (The last two noisy test patterns are elided in this listing.) */
};
/******************************************************************************
INITIALIZATION
******************************************************************************/
void GenerateNetwork(NET* Net)
{
INT i;
Net->Units = N;
Net->Output = (INT*) calloc(N, sizeof(INT));
Net->Threshold = (INT*) calloc(N, sizeof(INT));
Net->Weight = (INT**) calloc(N, sizeof(INT*));
for (i=0; i<N; i++) {
Net->Threshold[i] = 0;
Net->Weight[i] = (INT*) calloc(N, sizeof(INT));
}
}
void CalculateWeights(NET* Net)
{
INT i,j,n;
INT Weight;
for (i=0; i<Net->Units; i++) {
for (j=0; j<Net->Units; j++) {
Weight = 0;
if (i!=j) {
for (n=0; n<NUM_DATA; n++) {
Weight += Input[n][i] * Input[n][j];
}
}
Net->Weight[i][j] = Weight;
}
}
}
void SetInput(NET* Net, INT* Input)
{
INT i;
for (i=0; i<Net->Units; i++) {
Net->Output[i] = Input[i];
}
WriteNet(Net);
}
void GetOutput(NET* Net, INT* Output)
{
INT i;
for (i=0; i<Net->Units; i++) {
Output[i] = Net->Output[i];
}
WriteNet(Net);
}
/******************************************************************************
PROPAGATING SIGNALS
******************************************************************************/
BOOL PropagateUnit(NET* Net, INT i)
{
INT j;
INT Sum, Out;
BOOL Changed;
Changed = FALSE;
Sum = 0;
for (j=0; j<Net->Units; j++) {
Sum += Net->Weight[i][j] * Net->Output[j];
}
if (Sum != Net->Threshold[i]) {
if (Sum < Net->Threshold[i]) Out = LO;
if (Sum > Net->Threshold[i]) Out = HI;
if (Out != Net->Output[i]) {
Changed = TRUE;
Net->Output[i] = Out;
}
}
return Changed;
}
void PropagateNet(NET* Net)
{
INT Iteration, IterationOfLastChange;
Iteration = 0;
IterationOfLastChange = 0;
do {
Iteration++;
if (PropagateUnit(Net, RandomEqualINT(0, Net->Units-1)))
IterationOfLastChange = Iteration;
} while (Iteration-IterationOfLastChange < 10*Net->Units);
}
/******************************************************************************
SIMULATING THE NET
******************************************************************************/
void SimulateNet(NET* Net, INT* Input)
{
INT Output[N];
SetInput(Net, Input);
PropagateNet(Net);
GetOutput(Net, Output);
}
/******************************************************************************
MAIN
******************************************************************************/
void main()
{
NET Net;
INT n;
InitializeRandoms();
GenerateNetwork(&Net);
InitializeApplication(&Net);
CalculateWeights(&Net);
for (n=0; n<NUM_DATA; n++) {
SimulateNet(&Net, Input[n]);
}
for (n=0; n<NUM_DATA; n++) {
SimulateNet(&Net, Input_[n]);
}
FinalizeApplication(&Net);
}
Simulator Output for the Autoassociative Memory Application
[Each of the five stored 10x10 patterns is presented and recalled verbatim,
shown as input -> output pairs; the five degraded probe patterns are then
presented and the network settles into stable states, mostly the corresponding
stored patterns. The fixed-width alignment of the side-by-side patterns was
lost in extraction.]
ASSIGNMENT NO.
EXPERIMENT NO.
Signature of Student
Signature of Professor
correct pronunciation with a given sequence of letters, based on the context in
which the letters appear. In other words, NETtalk learns to use the letters
surrounding the currently pronounced phoneme as cues to its intended phonemic
mapping. A toy sketch of this sliding-window idea follows.
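The sketch below encodes a seven-letter window centred on the current letter as
a one-hot input vector. The window size, the letters-only encoding, and all
names here are simplifying assumptions for illustration; the real NETtalk used
a richer encoding with punctuation and word-boundary symbols.

#include <string.h>
#define WINDOW 7
#define ALPHA 26
/* Fill input[] with a one-hot encoding of the 7 letters around text[pos]. */
void encode_window(const char *text, int pos, int len,
double input[WINDOW * ALPHA])
{
int k;
memset(input, 0, sizeof(double) * WINDOW * ALPHA);
for (k = 0; k < WINDOW; k++) {
int p = pos - WINDOW / 2 + k; /* text position feeding slot k */
if (p >= 0 && p < len && text[p] >= 'a' && text[p] <= 'z')
input[k * ALPHA + (text[p] - 'a')] = 1.0;
}
}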
Smartphone App
Features
Free calling to the US and Canada
Connectivity through WiFi / 3G / Edge
Record conversations
Import your contacts from your phone
Receive customized support through 611
Dial 2663 for a FREE conference-calling bridge
Free 411 Directory Assistance
How it Works
1. Visit m.nettalk.com to create your netTALK Smartphone Account.
2. Download and install the netTALK Smartphone App on your mobile device.
3. Use the username and password you created to log in to the netTALK
Smartphone App.
4. Call your friends for free and invite them to do the same.