Reputation: 11
I'm trying to tokenize strings from stdin. I am only interested in alphabetic characters and build an array of the resulting words (ignoring non-alphabetic characters). For some reason, when I read 24 characters or more from stdin, I receive the error:
free(): invalid next size (fast):
This is the relevant code; it works fine on shorter strings (23 characters or fewer):
char **tokenize(int *nbr_words) {
    char **list = calloc(INITIAL_SIZE, sizeof(char *));
    char *temp = NULL;
    temp = malloc(sizeof(200));
    while (fgets(temp, 200, stdin)) {
        char *newWord = NULL;
        newWord = malloc(sizeof(100));
        int i = 0;
        while (temp[i] != '\n') {
            if (isalpha(temp[i]) && temp[i+1] != '\n') {
                strncat(newWord, &temp[i], 1);
                i++;
            }
            else if (isalpha(temp[i]) && temp[i+1] == '\n') {
                strncat(newWord, &temp[i], 1);
                list[*nbr_words] = newWord;
                *nbr_words += 1;
                printf("%s\n", list[*nbr_words - 1]);
                i++;
                if (*nbr_words % 10 == 9) {
                    list = realloc(list, *nbr_words + 10);
                }
                free(newWord);
                newWord = malloc(sizeof(100));
                *newWord = NULL;
            } else {
                if (*newWord == NULL) {
                    i++;
                }
                else if (*nbr_words % 10 != 9) {
                    list[*nbr_words] = newWord;
                    *nbr_words += 1;
                    printf("%s\n", list[*nbr_words - 1]);
                    i++;
                    free(newWord);
                    newWord = malloc(sizeof(100));
                    *newWord = NULL;
                } else {
                    list = realloc(list, *nbr_words + 10);
                    list[*nbr_words] = newWord;
                    *nbr_words += 1;
                    printf("%s\n", list[*nbr_words - 1]);
                    i++;
                    free(newWord);
                    newWord = malloc(sizeof(100));
                    *newWord = NULL;
                }
            }
        }
        free(temp);
        temp = malloc(sizeof(200));
        *temp = NULL;
    }
    return list;
}
Upvotes: 0
Views: 643
Reputation: 23058
You keep allocating temp and newWord with malloc(sizeof(200)) and malloc(sizeof(100)). The literals 200 and 100 have type int, so sizeof(200) and sizeof(100) both evaluate to sizeof(int), which is much smaller than you expect.
Modify
temp = malloc(sizeof(200));
newWord = malloc(sizeof(100));
into
temp = malloc(200);
newWord = malloc(100);
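If you want to convince yourself of the difference, here is a minimal standalone sketch (not taken from the question's code) that prints both sizes and performs the corrected allocations:

    #include <stdio.h>
    #include <stdlib.h>

    int main(void) {
        /* 200 and 100 are int literals, so sizeof(200) and sizeof(100)
           are the size of an int (typically 4), not 200 or 100 bytes. */
        printf("sizeof(200) = %zu\n", sizeof(200));
        printf("sizeof(100) = %zu\n", sizeof(100));

        /* Corrected allocations: request the byte counts you actually want. */
        char *temp = malloc(200);     /* 200-byte line buffer */
        char *newWord = malloc(100);  /* 100-byte word buffer */
        if (temp == NULL || newWord == NULL) {
            return 1;                 /* allocation failure */
        }

        free(newWord);
        free(temp);
        return 0;
    }

With the original 4-byte allocations, fgets and strncat write well past the end of the buffers, which corrupts the heap allocator's bookkeeping and typically shows up later as errors like free(): invalid next size once the input line is long enough.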
Upvotes: 3