-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathscript.js
More file actions
146 lines (134 loc) · 5.18 KB
/
script.js
File metadata and controls
146 lines (134 loc) · 5.18 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
// Phenotype reference data, populated asynchronously from list.json by the
// startup IIFE below; each entry's descriptor columns are decoded to
// Float32Array before use. Stays undefined until loading completes.
let list;
// Status line element used to report progress and results to the user.
const loading = document.getElementById('loading');
// Container whose first (and only) child is the currently displayed <img>.
const imgContainer = document.getElementById('imgContainer');
// Load the image at `url`, wait until its pixel data is fully decoded,
// then show it as the sole child of #imgContainer.
async function displayImg(url) {
  const picture = new Image();
  picture.src = url;
  await picture.decode(); // resolves once the image is ready to paint
  imgContainer.replaceChildren(picture);
}
// FIX: both helpers were assigned without a declaration, creating implicit
// globals (a ReferenceError in strict mode / ES modules).
// Dot product of two equal-length numeric vectors.
const dot = (a, b) => a.reduce((acc, n, i) => acc + (n * b[i]), 0);
// Cosine similarity of two vectors, in [-1, 1] (1 = same direction).
const cos = (a, b) => dot(a, b) / Math.sqrt(dot(a,a) * dot(b,b));
/**
 * Detect a single face in the currently displayed image, score its descriptor
 * against every phenotype sample for the (user-confirmed) sex, and render the
 * ranked results into #resultsContainer.
 * Requires `list` to be populated (startup IIFE finished) before being called.
 */
async function analyze() {
  loading.textContent = 'Analyzing the image. . .';
  const detection = await faceapi.detectSingleFace(imgContainer.firstChild).withFaceLandmarks().withAgeAndGender().withFaceDescriptor();
  if (detection === undefined) {
    loading.textContent = 'No face detected. Try another image.';
    return;
  }
  const desc = detection.descriptor;
  let sex = detection.gender;
  if ( confirm(`The program thinks you are ${sex} with ${(detection.genderProbability*100).toFixed(2)}% confidence. Is this correct?`) ) {
    sex = sex.substring(0, 1); // 'male' -> 'm', 'female' -> 'f'
  } else {
    if ( sex == 'female' ) { sex = 'm'; } else { sex = 'f'; }
  }
  // Column 1 of each entry holds the male sample descriptor, column 2 the female one.
  const i = (sex == 'm') ? 1 : 2;
  // Work on a deep copy: replace each stored descriptor in-place with its
  // cosine-similarity percentage against the detected face.
  let list2 = structuredClone(list);
  for (let j = 0; j < list2.length; j++) {
    const len2 = list2[j].length;
    if (len2 > 1) {
      list2[j][0][i] = cos(list2[j][0][i], desc)*100;
    }
    for (let k = 0; k < list2[j][len2-1].length; k++) {
      list2[j][len2-1][k][i] = cos(list2[j][len2-1][k][i], desc)*100;
    }
    // Best match first within each group.
    list2[j][len2-1].sort(function(a, b){ return b[i] - a[i]; });
  }
  // A group scores as its basic-type score or its best member, whichever is higher.
  function grpScore(a) {
    if (a.length > 1) { return Math.max(a[0][i],a[1][0][i]); }
    return a[0][0][i];
  }
  list2.sort(function(a, b){ return grpScore(b) - grpScore(a); });
  loading.textContent = 'Results!';
  console.log('Match Results');
  // FIX: declare locally with `const` (it was an implicit global), and collect
  // the markup in an array so innerHTML is assigned once instead of being
  // re-parsed by the browser on every `+=` inside the loop.
  const resultsContainer = document.getElementById('resultsContainer');
  const html = [
`<br>
<p>
<h2>Match Results</h2>
<br>
The following shows to what extent the selected image matches the sample for each phenotype. <b>The correct result will very likely be among the first ones,</b> the remaining are shown merely for information's sake. Again, "high" values for other phenotypes <b>do not</b> necessarily mean the submitted image is even partially of that type.
<br>
</p>`
  ];
  for (const a of list2) {
    console.log(' ');
    html.push(`<br>`);
    const aLen = a.length;
    if (aLen > 1) {
      console.log(`${a[0][0]}: ${a[0][i]}%`);
      html.push(
`<div>
<img src="faces_lowres/basic/${a[0][0].toLowerCase()}${sex}.jpg">
<div>
<a href="http://humanphenotypes.net/basic/${a[0][0]}.html"><h3>${a[0][0]}</h3></a>
${a[0][i]}% similarity
</div>
</div>`);
    }
    for (const arr of a[aLen-1]) {
      console.log(` ${arr[0]}: ${arr[i]}%`);
      html.push(
`<div>
<img src="faces_lowres/${arr[0].toLowerCase()}${sex}.jpg" style="margin-left: 30px">
<div>
<a href="http://humanphenotypes.net/${arr[0]}.html"><h3>${arr[0]}</h3></a>
${arr[i]}% similarity
</div>
</div>`);
    }
  }
  resultsContainer.innerHTML = html.join('');
}
// When the user picks a file, display it; if the models are already loaded
// (the #loader element has been removed), run the analysis immediately.
// Must stay a `function` (not an arrow) because it reads `this.files`.
document.getElementById('imgInp').onchange = async function () {
  const [chosen] = this.files;
  if (!chosen) return;
  await displayImg(URL.createObjectURL(chosen));
  const stillLoading = document.getElementById('loader') !== null;
  if (!stillLoading) analyze();
};
/**
 * Startup: load the face-api models, fetch the phenotype list, and decode
 * every stored base64 descriptor into a Float32Array ready for cosine
 * comparison. Removes the #loader overlay when done, then auto-runs
 * analyze() if the user already selected an image while loading.
 */
(async () => {
  // FIX: the four model loads are independent fetches — run them in parallel
  // instead of serially awaiting each one.
  await Promise.all([
    faceapi.loadSsdMobilenetv1Model('/models'),
    faceapi.loadFaceLandmarkModel('/models'),
    faceapi.loadFaceRecognitionModel('/models'),
    faceapi.loadAgeGenderModel('/models'),
  ]);
  // FIX: parse straight from the response instead of text() + JSON.parse.
  const response = await fetch('list.json');
  list = await response.json();
  // FIX: declare the decoders with `const` (they were implicit globals) and
  // name them for what they actually do — the stored descriptors are base64,
  // not hex.
  const b64ToF32Arr = (str) => new Float32Array(new Uint8Array([...atob(str)].map(c => c.charCodeAt(0))).buffer);
  const decodeEntry = (arr) => [ arr[0], b64ToF32Arr(arr[1]), b64ToF32Arr(arr[2]) ];
  for (let i = 0; i < list.length; i++) {
    const len = list[i].length;
    if (len > 1) {
      list[i][0] = decodeEntry(list[i][0]);
    }
    for (let j = 0; j < list[i][len-1].length; j++) {
      list[i][len-1][j] = decodeEntry(list[i][len-1][j]);
    }
  }
  loading.textContent = 'Models fetched!';
  document.getElementById('loader').remove();
  if ( imgContainer.children.length > 0 ) analyze();
})();
// Display the reference image for `str` and return the face descriptor
// detected in it, base64-encoded for storage in list.json.
async function getDesc(str) {
  await displayImg(`faces/${str.toLowerCase()}.jpg`);
  const detection = await faceapi.detectSingleFace(imgContainer.firstChild).withFaceLandmarks().withFaceDescriptor();
  const rawBytes = new Uint8Array(detection.descriptor.buffer);
  return btoa(String.fromCharCode(...rawBytes));
}
// Developer utility (presumably invoked from the console — no caller in this
// file): regenerate the base64 descriptor columns of `list` by running face
// detection on every full-resolution sample image, then dump the result as
// pretty-printed JSON into the status element for copy/paste into list.json.
// NOTE: the awaits must stay strictly sequential — each getDesc() call
// displays its image in the shared #imgContainer before analyzing it.
async function generateDescriptors() {
// Models must be loaded first (#loader is removed once they are).
if ( document.getElementById('loader') == null ) {
let list2 = structuredClone(list);
const sex = ['m','f'];
// i = 0 fills column 1 (male samples), i = 1 fills column 2 (female samples).
for (let i = 0; i < 2; i++) {
for (let j = 0; j < list2.length; j++) {
const len2 = list2[j].length;
// Groups with a basic-type header carry their own sample image under basic/.
if (len2 > 1) {
list2[j][0][i+1] = await getDesc( 'basic/' + list2[j][0][0] + sex[i] );
}
// Last element of each group is the array of member phenotypes.
for (let k = 0; k < list2[j][len2-1].length; k++) {
list2[j][len2-1][k][i+1] = await getDesc( list2[j][len2-1][k][0] + sex[i] );
}
}
}
loading.textContent = JSON.stringify(list2, null, 2);
} else { alert('Please wait until the models are fetched.'); }
}