<!DOCTYPE html>
<html lang="en">
<head>
    <meta charset="UTF-8">
    <meta name="viewport" content="width=device-width, initial-scale=1.0">
    <title>Model Test</title>
</head>
<body>
    <h1>Testing Model Loading...</h1>
    <div id="status">Initializing...</div>
    <div id="results"></div>
    
    <script type="module">

        import * as ort from 'https://cdn.jsdelivr.net/npm/onnxruntime-web@1.17.0/dist/esm/ort.min.js';

        

        const statusDiv = document.getElementById('status');

        const resultsDiv = document.getElementById('results');

        

        async function testModels() {

            try {

                statusDiv.textContent = 'Configuring ONNX Runtime...';

                

                ort.env.wasm.wasmPaths = 'https://cdn.jsdelivr.net/npm/onnxruntime-web@1.17.0/dist/';

                ort.env.wasm.numThreads = 4;

                

                // Test Classifier

                statusDiv.textContent = 'Loading Classifier...';

                const classifier = await ort.InferenceSession.create('./models/classifier_model_compressed/model.onnx');

                resultsDiv.innerHTML += '<p>βœ… Classifier loaded successfully!</p>';

                

                // Test Quality

                statusDiv.textContent = 'Loading Quality Model...';

                const quality = await ort.InferenceSession.create('./models/poor_good_compressed/model.onnx');

                resultsDiv.innerHTML += '<p>βœ… Quality model loaded successfully!</p>';

                

                // Test Grader

                statusDiv.textContent = 'Loading Grader...';

                const grader = await ort.InferenceSession.create('./models/grader_model_compressed/model.onnx');

                resultsDiv.innerHTML += '<p>βœ… Grader model loaded successfully!</p>';

                

                // Test YOLO

                statusDiv.textContent = 'Loading YOLO...';

                const yolo = await ort.InferenceSession.create('./models/yolo-cropper/best.onnx');

                resultsDiv.innerHTML += '<p>βœ… YOLO model loaded successfully!</p>';

                

                statusDiv.textContent = 'βœ… All models loaded successfully!';

                resultsDiv.innerHTML += '<h2>πŸŽ‰ Ready to use!</h2>';

                

            } catch (error) {

                statusDiv.textContent = '❌ Error: ' + error.message;

                resultsDiv.innerHTML += '<p style="color:red;">Error: ' + error.stack + '</p>';

            }

        }

        

        testModels();

    </script>
</body>
</html>