{{indexmenu_n>

====== WatDiv Query S1 plans ======

=== SPARQL DF plan ===
<code>
// random partitioning
val DATA = dfDefault
val t1 = DATA.where(s"
// fixes the poor join locality: t1 is completed (union with e1) so that it
// has several non-empty pieces and the join reads its "input from memory"
val e1 = sc.parallelize(1 to NB_FRAGMENTS,
val t1OK = t1.unionAll(e1)
var plan = t1OK

// ordered by increasing triple pattern size
val orderedProp = List(
("
("
("

val triples = orderedProp.map{case(ns,
</code>

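Only the head of the plan survives in the snippet above. As a rough sketch of the overall shape of such a DataFrame plan (not the exact code of this page), a star query like S1 can be evaluated by selecting one two-column DataFrame per triple pattern and chaining joins on the shared subject; the column names (s, p, o) and the predicate ids are assumptions here.

<code scala>
// Hypothetical sketch of the star-join shape used by the DF plan.
// Assumptions: `data` is a DataFrame of encoded triples with columns (s, p, o);
// `predicateIds` lists the predicates of S1, smallest triple pattern first.
import org.apache.spark.sql.DataFrame

def starPlan(data: DataFrame, predicateIds: Seq[Long]): DataFrame = {
  // one two-column DataFrame per triple pattern: the subject plus a renamed object
  val patterns = predicateIds.zipWithIndex.map { case (idP, i) =>
    data.where(s"p = $idP").select("s", "o").withColumnRenamed("o", s"o$i")
  }
  // chain the joins on the shared subject, smallest pattern first
  patterns.reduceLeft((plan, tp) => plan.join(tp, "s"))
}
</code>

Ordering the patterns from the most selective to the least keeps the intermediate join results small, which is what the ''orderedProp'' list above encodes.
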
=== SPARQL Hybrid DF plan ===
<code>
val subset = df.where(s"
// Merging time=4,

val DATA = subset

val t1 = DATA.where(s"
val e1 = sc.parallelize(1 to NB_FRAGMENTS,
val t1OK = t1.unionAll(e1)
var plan = t1OK

// ordered by increasing triple pattern size
val orderedProp = List(
("
("
("
("
("
("
("
("

val triples = orderedProp.map{case(ns,
  val idP = getIdP(ns,
  DATA.where(s"
}}

// next triples
for( i <- triples) {
  plan = plan.join(i,
}

// Execute query plan for S1
queryTimeDFIter(plan,
// 2.87 + 4.885 = 7.76s
</code>
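The ''Merging time'' comment suggests that ''subset'' is a pre-built DataFrame holding only the triples that S1 needs, and that the rest of the plan is the same as the plain DF plan run on this smaller input. A minimal sketch of building such a subset, assuming an integer predicate column ''p''; the list of S1 predicate ids is a placeholder:

<code scala>
// Sketch only: keep just the triples whose predicate occurs in S1, then reuse
// the DF plan above with DATA pointing to this smaller DataFrame.
val s1PredicateIds: Seq[Long] = Seq() // placeholder: the ids of the S1 predicates
val subsetSketch = df.where(df("p").isin(s1PredicateIds: _*)).cache()
</code>
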
=== S2RDF plan ===
<code>
val VP2EXP=VP2Random
</code>
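Only the first line of the S2RDF plan is kept by this revision. In a vertically partitioned (VP) layout, each predicate gets its own two-column (subject, object) table, so a star plan joins those tables directly on the subject. A hedged sketch, where ''vps'' is an illustrative map from predicate name to its VP DataFrame:

<code scala>
// Hypothetical sketch of a VP-style plan: one DataFrame per predicate,
// each with columns (s, o), joined pairwise on the subject.
import org.apache.spark.sql.DataFrame

def vpStarPlan(vps: Map[String, DataFrame], predicates: Seq[String]): DataFrame =
  predicates.zipWithIndex
    .map { case (p, i) => vps(p).withColumnRenamed("o", s"o$i") }
    .reduceLeft((plan, vp) => plan.join(vp, "s"))
</code>
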
=== S2RDF+Hybrid plan ===
<code>
// VPs partitioned by subject
val VP2EXP=VP2Subject
</code>
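The only visible change in the hybrid variant is that the VPs are partitioned by subject rather than randomly. A sketch of that idea under the same assumptions as above (illustrative names, Spark 1.6 API):

<code scala>
// Sketch only: hash-partition every VP on its subject so that the subject
// joins between VPs are co-partitioned and avoid a full shuffle per join.
import org.apache.spark.sql.DataFrame
import org.apache.spark.sql.functions.col

def partitionVPsBySubject(vps: Map[String, DataFrame], nbFragments: Int): Map[String, DataFrame] =
  vps.mapValues(_.repartition(nbFragments, col("s")).cache()).toMap
</code>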