Segfault during malloc

Hello, I am trying to load a large dictionary file into a hash table for a spell checker. I am getting a segfault when word_count is 6318. I ran a backtrace in gdb and it is segfaulting on the marked line, and I am not sure why. The text file has 143091 words, each on its own line ending with a '\n'. Thank you.


Header:
#ifndef HASHTABLE
#define HASHTABLE

#define WORD_LENGTH  45
#define TABLE_LENGTH 26

#include <stdlib.h>
#include <stdio.h>
#include <ctype.h>
#include <stdbool.h>


//struct used to hold each word
typedef struct node 
{
    char word[WORD_LENGTH + 1];
    struct node *next;
	
} node;

// table 
typedef struct table_node 
{
    char letter;
    struct node *next;
	
} table_node;

int get_file_size(FILE *file);

int create_hash_t( );

void key_hash_t(FILE *file ); 

bool load_hash_t(FILE *file);

int hash(char *str);

#endif 


Source:
#include "hashTable.h"

table_node hash_table[TABLE_LENGTH];

// TODO: Return type, error checking
int create_hash_t( )
{
	for ( int i = 0; i < TABLE_LENGTH; i++ )
	{
		hash_table[i].next = malloc( sizeof( *hash_table[i].next ) );
		
		if ( hash_table[i].next == NULL )
		{ 
			return 1;
		}
	}
	return 0;
}

//TODO: Error checking
void key_hash_t( ) 
{
	char key = 'a';
	for ( int i = 0; i < TABLE_LENGTH; i++ )
	{
		hash_table[i].letter = (key + i);
	}
}

//TODO: 
bool load_hash_t(FILE *file)
{
	char word[WORD_LENGTH];
	char c;
	int index = 0, hash_val = 0;
	//int size = get_file_size(file);
	file = fopen("large", "r");
	
	
	if (file == NULL)
	{
		printf ("File not found...\n");
		exit(1);
	}
	
	for (int word_count = 0; word_count < 6318; word_count++) 
	{	
		node *current_node = malloc( sizeof(current_node) );
		node *new_node = malloc( sizeof(new_node) ); //<---- segfaults here when word_count is 6318
		
		// make sure malloc was successful
		if (current_node == NULL || new_node == NULL)
		{
			printf ("Failed to allocate memory...\n");
			exit(1);
		}
		
		current_node->next = NULL;
		new_node->next = NULL;
		
		while ((c = fgetc(file)) != '\n')
		{
			word[index] = c;
			index++;
		}
	
		hash_val = hash(word);         
				
		for (int j = 0; j < index; j++)
		{
			new_node->word[j] = word[j];
		}
		
		index = 0;
		
		// if this bucket has no nodes yet add it here
		if (hash_table[hash_val].next == NULL)
		{
			hash_table[hash_val].next = new_node;
		}
		else
		{
			// point to correct bucket
			current_node->next = hash_table[hash_val].next;
			
			// traverse to the end of the list
			while (current_node->next != NULL)
			{
				current_node = current_node->next;
			}
			
			//add new node to end of list
			current_node->next = new_node;
		}
	}
	
	return true;
}

//TODO: 
int get_file_size(FILE *file)
{
	char c;
	int line_count = 0;
	
	while ((c = fgetc(file)) != EOF)
	{
		if (c == '\n')
		{
			line_count++;
		}
	}
	fclose(file);
	return line_count;
}

//TODO: 
int hash(char *str)
{
	return (tolower(str[0]) - 97);
}

> node *new_node = malloc( sizeof(new_node) );
'new_node' is a pointer.
sizeof(new_node) would give you the size of a pointer, not of a 'node'. If those sizes differ, then you would be trying to access memory that was not reserved.
You may use valgrind to check it.
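For illustration, here is a minimal standalone sketch (not part of the original program, just the same node struct) that prints the two sizes; on a typical 64-bit system the pointer is 8 bytes while the struct is around 56 bytes:

#include <stdio.h>

#define WORD_LENGTH 45

typedef struct node
{
    char word[WORD_LENGTH + 1];
    struct node *next;
} node;

int main(void)
{
    node *new_node = NULL;

    // sizeof(new_node) is the size of the pointer itself.
    printf("sizeof(new_node)  = %zu\n", sizeof(new_node));

    // sizeof(*new_node) is the size of the struct it points to:
    // the 46-byte word buffer plus the next pointer plus padding.
    // This is what malloc actually needs to reserve.
    printf("sizeof(*new_node) = %zu\n", sizeof(*new_node));

    return 0;
}

With sizeof(new_node), each call only reserves a pointer's worth of memory, so copying a word into new_node->word writes past the end of the block and corrupts the heap; the crash then shows up later, inside a later malloc call. Running the program under valgrind should report those writes as invalid.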
That makes sense to me. I thought it should be sizeof(node), but I was told on another forum to use the variable name rather than the type name. I will try that, thank you for your reply.
That did it, THANK YOU! I really appreciate your help.
> to use the variable name not the type name
Yes, it is less error-prone.
node *new_node = malloc( sizeof( *new_node ) );
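For reference, the two allocations inside the load_hash_t loop with that fix applied would be (just a sketch of the corrected lines; the rest of the loop is unchanged):

// Reserve room for a whole node, not just for a pointer.
node *current_node = malloc( sizeof( *current_node ) );
node *new_node = malloc( sizeof( *new_node ) );

Note that the original sizeof(current_node) on the line above the marked one has the same problem. Tying the size to the variable means the call stays correct even if the variable's type is changed later, which is why it is less error-prone than writing sizeof(node).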