en:site:recherche:logiciels:sparqlwithspark:datasetwatdiv
                Differences
This shows you the differences between two versions of the page.
| Both sides previous revision | Previous revision | Next revision | Previous revision | ||
| en:site:recherche:logiciels:sparqlwithspark:datasetwatdiv [16/09/2016 23:00] – hubert | en:site:recherche:logiciels:sparqlwithspark:datasetwatdiv [16/09/2016 23:01] (current) – [Load VP's] hubert | ||
|---|---|---|---|
| Line 88: | Line 88: | ||
| Create one dataset per property. | Create one dataset per property. | ||
| - | < | + | < | 
| /* | /* | ||
| val df = num. | val df = num. | ||
| Line 119: | Line 119: | ||
| ===== Load VP's ===== | ===== Load VP's ===== | ||
| - | < | + | < | 
| // S2RDF VP | // S2RDF VP | ||
| Line 147: | Line 147: | ||
| //val dictSO = sqlContext.read.parquet(dictSOFile).repartition(NB_FRAGMENTS, | //val dictSO = sqlContext.read.parquet(dictSOFile).repartition(NB_FRAGMENTS, | ||
| dictSO.persist().count | dictSO.persist().count | ||
| - | // | ||
| // VP Dataset | // VP Dataset | ||
| // ------- | // ------- | ||
| - | //val encodedFile = dir + "/ | ||
| val vpDir = dir + "/ | val vpDir = dir + "/ | ||
| - | // CHRONO | + | // TIMER | 
| def queryTimeDFIter(q: | def queryTimeDFIter(q: | ||
| var l = new scala.collection.mutable.ArrayBuffer[Double](nbIter) | var l = new scala.collection.mutable.ArrayBuffer[Double](nbIter) | ||
| Line 172: | Line 170: | ||
| - | // define  | + | // Define the VPs to be loaded | 
| // | // | ||
| val nbP = dictP.count.toInt | val nbP = dictP.count.toInt | ||
en/site/recherche/logiciels/sparqlwithspark/datasetwatdiv.1474059625.txt.gz · Last modified:  by hubert
                
                