Mirror of https://github.com/LCPQ/QUESTDB_website.git (synced 2024-11-03 20:53:59 +01:00)
Faster uniq function
Commit 5d637180c7 (parent f694529175)
@@ -112,6 +112,7 @@ draft: false
     for (mol of uniq(subset.map(d => d.molecule))) {
         const submol = subset.filter(d => d.molecule === mol)
         const source = await submol.findAsync(async (d) => {
+        // const source = submol.find((d) => {
             if (db.sets.get(d.set.name)[d.set.index] === "10.1021/acs.jctc.8b01205") {
                 return d.method.name === "CASPT2" && d.method.basis === "aug-cc-pVDZ"
             } else {
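The only change in this hunk is a commented-out synchronous alternative to findAsync. findAsync is not a standard Array method, so it is presumably a helper defined elsewhere in the repository that resolves to the first element whose async predicate returns true. The sketch below shows what such a helper could look like; the implementation is assumed, not taken from the repo.

// Hypothetical helper (not repository code): awaits the predicate for each
// element in turn and resolves to the first match, or undefined if none.
Array.prototype.findAsync = async function (predicate) {
    for (const item of this) {
        if (await predicate(item)) return item
    }
    return undefined
}

// Usage matching the hunk: const source = await submol.findAsync(async (d) => { ... })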
@@ -279,9 +279,9 @@ class dataFileBase {
         switch (trueTypeOf(file)) {
             case String.name:
                 file = getFullDataPath(file)
-                const maxAge= (DebugMode.Enabled,0,600)
+                // const maxAge= (DebugMode.Enabled,0,600)
                 // var str = await getTextFromFileUrlAsync(file,{"Cache-Control":`max-age=${maxAge}`})
-                var str = await getTextFromFileUrl(file)
+                var str = await getTextFromFileUrl(file) //TODO
                 break;
             case File.name:
                 var str = await getTextFromUploadedFileAsync(file)
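The disabled maxAge line uses the comma operator, so (DebugMode.Enabled,0,600) always evaluates to 600 regardless of the debug flag; it is commented out here along with the cache-controlled fetch, and the plain getTextFromFileUrl call is marked //TODO. A conditional expression would express the apparent intent. The snippet below is only a sketch of that intent, not code from the commit, and DebugMode here is a placeholder for the repository's existing flag.

// Sketch only: DebugMode stands in for the repository's flag object.
const DebugMode = { Enabled: false }
// No HTTP caching while debugging, 10 minutes otherwise.
const maxAge = DebugMode.Enabled ? 0 : 600
console.log(maxAge)  // 600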
@@ -291,6 +291,20 @@ class dataFileBase {
         dat.sourceFile = new websiteFile(file)
         return dat
     }
+    static load(file, kind = undefined) {
+        switch (trueTypeOf(file)) {
+            case String.name:
+                file = getFullDataPath(file)
+                var str = getTextFromFileUrl(file)
+                break;
+            case File.name:
+                var str = (async () => await getTextFromUploadedFileAsync(file))().then(x=>x)
+                break
+        }
+        var dat = this.loadString(str, kind);
+        dat.sourceFile = new websiteFile(file)
+        return dat
+    }
     _OnReadMetaPair(key, value) {
         switch (key) {
             case "molecule":
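In the new synchronous load, the File branch builds str as (async () => ...)().then(x => x). Chaining .then never unwraps a value synchronously, so str is still a pending Promise when this.loadString(str, kind) runs; only the String branch can yield text directly, and only if getTextFromFileUrl itself is synchronous. The standalone sketch below, with hypothetical names, demonstrates the behaviour.

// Demonstrates that an async IIFE plus .then(x => x) does not make the
// result available synchronously (hypothetical reader function).
async function readTextAsync() {
    return "file contents"
}

var str = (async () => await readTextAsync())().then(x => x)
console.log(str instanceof Promise)   // true: str is still a Promise here
str.then(text => console.log(text))   // the text only arrives asynchronously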
@@ -4,9 +4,11 @@ async function loadAllData() {
         fluo: [],
     };
     for (const f of getAbsFilesName()) {
+        // dic.abs.push(VertDataFile.load(f,VertExcitationKinds.Absorbtion))
         dic.abs.push(await VertDataFile.loadAsync(f,VertExcitationKinds.Absorbtion))
     }
     for (const f of getFluoFilesName()) {
+        // dic.fluo.push(VertDataFile.load(f,VertExcitationKinds.Fluorescence))
         dic.fluo.push(await VertDataFile.loadAsync(f,VertExcitationKinds.Fluorescence))
     }
     return dic;
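loadAllData keeps the awaited loadAsync calls and only adds commented-out synchronous alternatives. Because each await sits inside its own loop iteration, the files are fetched one after another; if the data files are independent, they could be fetched concurrently with Promise.all. The following is an alternative sketch under that assumption, reusing the repository's names, and is not part of the commit.

// Hypothetical concurrent variant: start every load first, then await them all.
async function loadAllDataConcurrently() {
    const dic = { abs: [], fluo: [] };
    dic.abs = await Promise.all(
        getAbsFilesName().map(f => VertDataFile.loadAsync(f, VertExcitationKinds.Absorbtion)))
    dic.fluo = await Promise.all(
        getFluoFilesName().map(f => VertDataFile.loadAsync(f, VertExcitationKinds.Fluorescence)))
    return dic;
}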
@@ -1,8 +1,11 @@
 function uniq(array)
 {
-    return uniqueArray = array.filter((obj1,index) => {
-        return index === array.findIndex(obj2 => {
-            return JSON.stringify(obj1) === JSON.stringify(obj2);
-        });
-    });
+    if (array.length == 0) return [];
+    var sortedArray = array.sort().map( x => [x, JSON.stringify(x)] );
+    var uniqueArray = [ sortedArray[0][0] ];
+    for (let i=1 ; i<sortedArray.length ; i++) {
+        if ( sortedArray[i][1] != sortedArray[i-1][1])
+            uniqueArray.push(sortedArray[i][0])
+    }
+    return uniqueArray;
 }