Valgrind is complaing about freed memory

hello, I got a clean bill of health from valgrind before I added the check function. Now valgrind is complaining about a pointer that was freed in unload and was fine before this function which I really don't understand.Here is the code and the valgrind error. Thank you.

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
  #include "hashTable.h"
#include <stdbool.h>


table_node hash_table[TABLE_LENGTH];

bool loaded = true;

//assigning letters a-z to table nodes for buckets
void key_hash_t( ) 
{
	char key = 'a';
	for ( int i = 0; i < TABLE_LENGTH; i++ )
	{
		hash_table[i].letter = (key + i);
	}
}

//TODO: return and error checking 
bool load_hash_t(const char *file_name)
{
	char word[WORD_LENGTH];
	char c;
	int index = 0, hash_val = 0, size = get_file_size(file_name);
	FILE *file = fopen(file_name, "r");
	
	if (file == NULL)
	{
		printf ("File not found...\n");
		return false;
	}

	for (int word_count = 0; word_count < size; word_count++) 
	{
		node *new_node = malloc( sizeof(*new_node) ); 

		// make sure malloc was successful
		if (new_node == NULL)
		{
			printf ("Failed to allocate memory...\n");
			return false;
		}

		new_node->next = NULL;
		
		while ((c = fgetc(file)) != '\n')
		{
			word[index] = c;
			index++;
		}

		hash_val = hash(word);

		for (int j = 0; j < index; j++)
		{
			new_node->word[j] = word[j];
		}

		index = 0;

		// if this bucket has no nodes yet add it here
		if (hash_table[hash_val].next == NULL)
		{
			hash_table[hash_val].next = new_node;
		}
		else
		{
			//add new  node to end of list
			new_node->next = hash_table[hash_val].next;
			hash_table[hash_val].next = new_node;
		}
	}
	fclose(file);
	return true;
}

// delete list nodes
bool unload_hash_t( )
{
	/**
	 * loop through array
	 * traverse each list and
	 * delete each node
	 **/
	for (int i = 0; i < TABLE_LENGTH; i++)
	{
		node *cursor = hash_table[i].next;

		while (cursor != NULL)
		{
			node *temp = cursor;
			cursor = cursor->next;
			free(temp);
		}
	}
	return true;
}

/**
 * get size of dictionary file 
 * for load function
 * */
int get_file_size(const char *file_name)
{
	char c;
	int line_count = 0;
	FILE *file = fopen(file_name, "r");
	
	if (file == NULL)
	{
		printf("File not found..\n");
		exit(EXIT_FAILURE);
	}

	while ((c = fgetc(file)) != EOF)
	{
		if (c == '\n')
		{
			line_count++;
		}
	}
	fclose(file);

	return line_count;
}

/**
 * convert first char to lowercase
 * subtract by 97 to give correct 
 * array index
 **/
int hash(const char *str)
{
	if (str == NULL)
	{
		printf("NULL pointer..\n");
		exit(EXIT_FAILURE);
	}
	return (tolower(str[0]) - 97);
}

/**
 * loop threw array to find bucket
 * traverse list to find word
 * return true if found false if not
 */
bool check(const char* word)
{
	//hash word
	int hash_val = hash(word); 
	
	//must reassign to make editable
	char *eword = strdup(word);

	//convert word to lower
	word_tolower(eword);

	node *cursor = NULL;
	
	//iterate table array to find bucket
	for(int i = 0; i  < TABLE_LENGTH; i ++)
	{
		cursor = hash_table[hash_val].next;
		//traverse list for bucket hash_val
		while (cursor != NULL)
		{
			if(strcmp(eword, cursor->word) == 0)
			{
                                free(eword);
				return true;
			}
			else
			{
				cursor = cursor->next;
			}
		}
		
	}
	free(eword);

	return false;
}

 /**
  * iterate through array and
  * traverse each list counting each node
  * as I go by
  **/
unsigned int size( )
{
	if (!loaded)
	{
		printf("Dictionary not loaded..\n");
		return 0;
	}
	node *cursor = NULL;
	int node_count = 0;
		
	for (int i = 0; i < TABLE_LENGTH; i++)
	{
		cursor = hash_table[i].next;
		while (cursor != NULL)
		{
			node_count++;
			cursor = cursor->next;
		}
	}
	return node_count;
}

void word_tolower(char* word)
{
	if (word == NULL)
	{
		printf("NULL pointer ...\n");
		exit(EXIT_FAILURE);
	}

	for (int i = 0, length = strlen(word); i  < length; i ++)
	{
		word[i] = tolower(word[i]);
	}
}


Valgrind Message:
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
arortell@gentoobox ~/Develpoment/Projects/C_Projects/Data_Structures/HashTable $ valgrind --leak-check=full --track-origins=yes ./hashTable
==7877== Memcheck, a memory error detector
==7877== Copyright (C) 2002-2013, and GNU GPL'd, by Julian Seward et al.
==7877== Using Valgrind-3.9.0 and LibVEX; rerun with -h for copyright info
==7877== Command: ./hashTable
==7877==
==7877== Conditional jump or move depends on uninitialised value(s)
==7877==    at 0x4C2C860: strcmp (mc_replace_strmem.c:730)
==7877==    by 0x400CA1: check (hashTable.c:179)
==7877==    by 0x400EBF: main (main.c:18)
==7877==  Uninitialised value was created by a heap allocation
==7877==    at 0x4C28730: malloc (vg_replace_malloc.c:291)
==7877==    by 0x40090C: load_hash_t (hashTable.c:40)
==7877==    by 0x400EB3: main (main.c:16)
==7877==
YESNumber of nodes = (3i)
==7877==
==7877== HEAP SUMMARY:
==7877==     in use at exit: 0 bytes in 0 blocks
==7877==   total heap usage: 6 allocs, 6 frees, 1,315 bytes allocated
==7877==
==7877== All heap blocks were freed -- no leaks are possible
==7877==
==7877== For counts of detected and suppressed errors, rerun with: -v
==7877== ERROR SUMMARY: 1 errors from 1 contexts (suppressed: 1 from 1)
 
Last edited on
Topic archived. No new replies allowed.