clean up a bit

Author: Sven M. Hallberg
Date:   2013-05-23 22:54:49 +02:00
Parent: faebe355a8
Commit: 4c5ca5ceab


@@ -171,8 +171,7 @@ static void stringmap_merge(HHashSet *workset, HCFStringMap *dst, HCFStringMap *
 static int fill_table_row(size_t kmax, HCFGrammar *g, HCFStringMap *row,
                           const HCFChoice *A)
 {
-  HHashSet *workset; // to be deleted after compile
-                     // ~> alloc in g->arena
+  HHashSet *workset;
 
   // initialize working set to the productions of A
   workset = h_hashset_new(g->arena, h_eq_ptr, h_hash_ptr);
@@ -182,7 +181,10 @@ static int fill_table_row(size_t kmax, HCFGrammar *g, HCFStringMap *row,
   // run until workset exhausted or kmax hit
   size_t k;
   for(k=1; k<=kmax; k++) {
-    // iterate over productions in workset...
+    // allocate a fresh workset for the next round
+    HHashSet *nextset = h_hashset_new(g->arena, h_eq_ptr, h_hash_ptr);
+
+    // iterate over the productions in workset...
     const HHashTable *ht = workset;
     for(size_t i=0; i < ht->capacity; i++) {
       for(HHashTableEntry *hte = &ht->contents[i]; hte; hte = hte->next) {
@@ -193,18 +195,20 @@ static int fill_table_row(size_t kmax, HCFGrammar *g, HCFStringMap *row,
         assert(rhs != NULL);
         assert(rhs != CONFLICT); // just to be sure there's no mixup
 
-        // remove this production from workset
-        h_hashset_del(workset, rhs);
-
         // calculate predict set; let values map to rhs
         HCFStringMap *pred = h_predict(k, g, A, rhs);
         h_stringmap_replace(pred, NULL, rhs);
 
-        // merge predict set into the row; accumulates conflicts in workset
-        stringmap_merge(workset, row, pred);
+        // merge predict set into the row
+        // accumulates conflicts in new workset
+        stringmap_merge(nextset, row, pred);
       }
     }
 
+    // switch to the updated workset
+    h_hashtable_free(workset);
+    workset = nextset;
+
     // if the workset is empty, row is without conflict; we're done
     if(h_hashset_empty(workset))
       break;
@@ -213,10 +217,8 @@ static int fill_table_row(size_t kmax, HCFGrammar *g, HCFStringMap *row,
     h_stringmap_replace(row, CONFLICT, NULL);
  }
 
-  if(k>kmax)  // conflicts remain
-    return -1;
-  else
-    return 0;
+  h_hashset_free(workset);
+  return (k>kmax)? -1 : 0;
 }
 
 /* Generate the LL(k) parse table from the given grammar.
@@ -483,7 +485,7 @@ int test_llk(void)
     return 2;
   }
 
-  HParseResult *res = h_parse(p, (uint8_t *)"xya", 3);
+  HParseResult *res = h_parse(p, (uint8_t *)"xa", 2);
   if(res)
     h_pprint(stdout, res->ast, 0, 2);
   else
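
The core of the change to fill_table_row is a worklist fixed-point loop: rather than deleting entries from workset and merging new conflicts back into the same set while it is being traversed, each round now allocates a fresh nextset, collects the conflicts there, frees the old set, and swaps the new one in. The standalone C sketch below shows that shape with a plain integer worklist; it is a minimal illustration under assumptions, not hammer code, and the names (worklist, wl_push, process, run) are hypothetical.

#include <stdio.h>
#include <stdlib.h>

/* Minimal dynamic worklist of ints, standing in for the set of
 * conflicted productions. Illustrative only, not the hammer API. */
typedef struct {
  int *items;
  size_t len, cap;
} worklist;

static void wl_init(worklist *w) { w->items = NULL; w->len = 0; w->cap = 0; }
static void wl_free(worklist *w) { free(w->items); }

static void wl_push(worklist *w, int x) {
  if (w->len == w->cap) {
    w->cap = w->cap ? w->cap * 2 : 8;
    w->items = realloc(w->items, w->cap * sizeof *w->items);
  }
  w->items[w->len++] = x;
}

/* Hypothetical per-item step: processing x may produce follow-up work.
 * Here it just halves x so the example terminates on its own. */
static void process(int x, worklist *next) {
  if (x / 2 > 0)
    wl_push(next, x / 2);
}

/* Run at most kmax rounds; return 0 if the worklist drained, -1 otherwise.
 * Same shape as the patched fill_table_row: build a fresh set per round,
 * free the old one, swap, and stop early when the new set is empty. */
static int run(worklist *work, size_t kmax) {
  size_t k;
  for (k = 1; k <= kmax; k++) {
    worklist next;                       /* fresh workset for this round   */
    wl_init(&next);

    for (size_t i = 0; i < work->len; i++)
      process(work->items[i], &next);    /* follow-up work goes into next  */

    wl_free(work);                       /* switch to the updated workset  */
    *work = next;

    if (work->len == 0)                  /* empty: nothing left to resolve */
      break;
  }
  wl_free(work);
  return (k > kmax) ? -1 : 0;            /* -1: round budget exhausted     */
}

int main(void) {
  worklist w;
  wl_init(&w);
  wl_push(&w, 40);
  printf("%d\n", run(&w, 8));            /* prints 0: drains in 6 rounds   */
  return 0;
}

Collecting the next round's work in a separate set avoids adding to and deleting from a hash set while it is being traversed, and the single return at the end mirrors the simplified (k>kmax)? -1 : 0 in the patch.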