Fixed typo in Gruntfile; added Make tasks to compile frontend assets; modernized how the test view gathers benchmark result data

2025-07-02 15:58:13 -04:00
parent 771d26ec3b
commit 190ae9f302
12 changed files with 171 additions and 26 deletions

View File

@@ -44,7 +44,7 @@ module.exports = function(grunt) {
}
},
js: {
files: ['assets/scripts/**/*.js'],
files: ['assets/scripts/**/*.coffee'],
tasks: ['coffee'],
options: {
atBegin: true,

View File

@@ -6,6 +6,16 @@ VERSION=`git describe --tags`
build:
	@go build -o $(NAME) -ldflags "-X git.metaunix.net/bitgoblin/blt/app.AppVersion=$(VERSION)"

.PHONY: grunt
## grunt: Compile frontend assets.
grunt:
	@npm run grunt

.PHONY: grunt-watch
## grunt-watch: Compile frontend assets while watching for changes.
grunt-watch:
	@npm run grunt watch

.PHONY: run
## run: Build and Run in development mode.
run: build

View File

@@ -1,2 +1,7 @@
root = if typeof window isnt 'undefined' then window else global

root.roundDecimal = (value) ->
  return Math.round(value * 100) / 100

$ ->
  console.log('DOM is ready.')
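The helper hangs roundDecimal off window (or global when there is no DOM) so the compiled test-view script below can reuse it. Purely as an illustration of the rounding rule, not code from this repository, the same two-decimal rounding expressed in Go:

package main

import (
	"fmt"
	"math"
)

// roundDecimal mirrors the CoffeeScript helper: scale by 100, round, scale back.
func roundDecimal(v float64) float64 {
	return math.Round(v*100) / 100
}

func main() {
	fmt.Println(roundDecimal(1234.5678)) // prints 1234.57
}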

View File

@@ -0,0 +1,62 @@
testId = $('#results-table').data('test-id')

$ ->
  $('#result-form').on 'submit', (e) ->
    e.preventDefault()

    form = $(this)
    formData = $(this).serialize()
    benchmarkId = $(this).find('[name="result_benchmark"]').val()

    $.post '/api/v1/result/add', formData, (response) ->
      if response == 'success'
        fetchTestBenchmarkResults(testId, benchmarkId)
        form[0].reset()

fetchTestBenchmarkResults = (testId, benchmarkId) ->
  try
    benchmarkSearchParams = new URLSearchParams
      benchmark_id: benchmarkId
    benchmarkRes = await fetch("/api/v1/benchmark/details?#{benchmarkSearchParams}")
    benchmarkData = await benchmarkRes.json()

    resultSearchParams = new URLSearchParams
      test_id: testId
      benchmark_id: benchmarkId
    resultRes = await fetch("/api/v1/result/list?#{resultSearchParams}")
    resultData = await resultRes.json()

    avg_total = 0
    min_total = 0
    max_total = 0
    for result in resultData
      avg_total += result.AverageScore
      min_total += result.MinimumScore if result.MinimumScore
      max_total += result.MaximumScore if result.MaximumScore

    tableRow = $("#results-table tr[data-benchmark-id=#{benchmarkId}]")
    tableRow.empty()

    tableRow.append('<td><a href="/benchmark/' + benchmarkData.ID + '">' + benchmarkData.Name + '</a></td>')
    tableRow.append('<td>' + benchmarkData.ScoringType + '</td>')
    tableRow.append('<td>' + resultData.length + '</td>')
    if resultData.length != 0
      tableRow.append('<td>' + roundDecimal(avg_total / resultData.length) + '</td>')
    else
      tableRow.append('<td>N/a</td>')
    if min_total != 0
      tableRow.append('<td>' + roundDecimal(min_total / resultData.length) + '</td>')
      tableRow.append('<td>' + roundDecimal(max_total / resultData.length) + '</td>')
    else
      tableRow.append('<td>N/a</td>')
      tableRow.append('<td>N/a</td>')
  catch error
    console.error 'An error occurred while fetching benchmark results.', error

$('#results-table tbody tr').each (index, tr) ->
  benchmarkId = $(tr).data('benchmark-id')
  console.log("Fetching results for benchmark id: " + benchmarkId)
  fetchTestBenchmarkResults(testId, benchmarkId)
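The script reads benchmarkData.Name, benchmarkData.ScoringType, and result.AverageScore straight off the JSON responses. That works because the model hunks in this commit show no json struct tags, so encoding/json serializes the exported Go field names as-is. A small sketch of the payload shapes the script expects; the field sets are inferred from what the script touches and the values are invented:

package main

import (
	"encoding/json"
	"fmt"
)

// Shapes inferred from what test.coffee reads; not the project's real structs.
type benchmarkDetails struct {
	ID          uint
	Name        string
	ScoringType string
}

type resultRow struct {
	AverageScore float32
	MinimumScore float32
	MaximumScore float32
}

func main() {
	// Roughly what /api/v1/benchmark/details?benchmark_id=1 returns.
	b, _ := json.Marshal(benchmarkDetails{ID: 1, Name: "Example benchmark", ScoringType: "points"})
	fmt.Println(string(b)) // {"ID":1,"Name":"Example benchmark","ScoringType":"points"}

	// Roughly what /api/v1/result/list?test_id=1&benchmark_id=1 returns.
	rs, _ := json.Marshal([]resultRow{{AverageScore: 1500, MinimumScore: 1450, MaximumScore: 1550}})
	fmt.Println(string(rs))
}

The await calls inside fetchTestBenchmarkResults are valid CoffeeScript 2: a function body containing await compiles to an async JavaScript function.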

View File

@@ -21,6 +21,9 @@ func main() {
// initialize database
models.Open()
// initialize base renderer
f.Use(flamego.Renderer())
// initialize templating engine
f.Use(template.Templater())
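flamego.Renderer() is what lets the new API handlers ask for a flamego.Render and call r.JSON. A minimal, standalone sketch of that wiring; the /ping route and its payload are made up for illustration:

package main

import "github.com/flamego/flamego"

func main() {
	f := flamego.New()
	f.Use(flamego.Renderer()) // makes flamego.Render injectable into handlers

	// Any handler can now take flamego.Render and respond with JSON.
	f.Get("/ping", func(r flamego.Render) {
		r.JSON(200, map[string]string{"status": "ok"})
	})

	f.Run()
}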

View File

@@ -11,7 +11,4 @@ type Hardware struct {
// has many tests
Tests []Test
// has many results
Results []Result
}

View File

@@ -10,10 +10,6 @@ type Result struct {
MinimumScore float32
MaximumScore float32
// belongs to Hardware
HardwareID int
Hardware Hardware
// belongs to Benchmark
BenchmarkID int
Benchmark Benchmark
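Taken together, the Hardware and Result changes drop the direct hardware-to-result link: a result now belongs to a test and a benchmark only, and the hardware is reached through the test. A hedged sketch of the trimmed models; the gorm.Model embedding, the GORM v2 import path, and the stubbed Test/Benchmark types are assumptions, since only fragments of these files appear in the diff:

package models

import "gorm.io/gorm" // assumption: GORM v2 with gorm.Model providing IDs

// Stub types so the sketch compiles; the real definitions live elsewhere in the repo.
type Test struct{ gorm.Model }
type Benchmark struct{ gorm.Model }

// Hardware after this commit: tests still belong to hardware, results no longer do.
type Hardware struct {
	gorm.Model
	Name string

	// has many tests
	Tests []Test
}

// Result after this commit: tied to a test and a benchmark only.
type Result struct {
	gorm.Model
	AverageScore float32
	MinimumScore float32
	MaximumScore float32

	// belongs to Test (TestID is what ApiV1ResultAdd sets below)
	TestID int
	Test   Test

	// belongs to Benchmark
	BenchmarkID int
	Benchmark   Benchmark
}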

View File

@@ -3,7 +3,7 @@
<div class="row">
<h2>{{ .test.Name }}</h2>
<p><a href="/hardware/{{ .test.Hardware.ID }}">link to hardware tested.</a></p>
<p>Hardware tested: <a href="/hardware/{{ .test.Hardware.ID }}">{{ .test.Hardware.Name }}</a></p>
<p>{{ .test.Description }}</p>
@@ -19,7 +19,7 @@
<h4>Add new result:</h4>
<form class="u-full-width" action="/result/add" method="POST">
<form id="result-form" class="u-full-width" action="/result/add" method="POST">
<div class="row">
<div class="columns four">
<label for="result_benchmark">
@@ -35,21 +35,21 @@
<div class="columns two">
<label for="result_avg">
Average score:
<input id="result_avg" class="u-full-width" type="number" name="result_avg" value="0">
<input id="result_avg" class="u-full-width" type="number" name="result_avg" step="0.01" placeholder="0.00">
</label>
</div>
<div class="columns two">
<label for="result_min">
Minimum score:
<input id="result_min" class="u-full-width" type="number" name="result_min" value="0">
<input id="result_min" class="u-full-width" type="number" name="result_min" step="0.01" placeholder="0.00">
</label>
</div>
<div class="columns two">
<label for="result_max">
Maximum score:
<input id="result_max" class="u-full-width" type="number" name="result_max" value="0">
<input id="result_max" class="u-full-width" type="number" name="result_max" step="0.01" placeholder="0.00">
</label>
</div>
@@ -58,34 +58,30 @@
</div>
<input type="hidden" name="result_test" value="{{ .test.ID }}">
<input type="hidden" name="result_hardware" value="{{ .test.Hardware.ID }}">
</div>
</form>
<hr>
<h4>Latest Benchmark Results:</h4>
<h4>Benchmark Results:</h4>
{{ $length := len .test.Results }} {{ if eq $length 0 }}
{{ $length := len .test.Benchmarks }} {{ if eq $length 0 }}
<p>There are currently no benchmarks recorded in this test.</p>
{{ else }}
<table class="u-full-width card-2">
<table id="results-table" class="u-full-width card-2" data-test-id="{{ .test.ID }}">
<thead>
<tr>
<th>Benchmark</th>
<th>Scoring Type</th>
<th># of Results</th>
<th>Average</th>
<th>Minimum</th>
<th>Maximum</th>
</tr>
</thead>
<tbody>
{{ range $res := .test.Results }}
<tr>
<td>{{ $res.Benchmark.Name }}</td>
<td>{{ $res.AverageScore }}</td>
<td>{{ if eq $res.MinimumScore 0.0 }}N/a{{ else }}{{ $res.MinimumScore }}{{ end }}</td>
<td>{{ if eq $res.MaximumScore 0.0 }}N/a{{ else }}{{ $res.MaximumScore }}{{ end }}</td>
</tr>
{{ range $benchmark := .test.Benchmarks }}
<tr data-benchmark-id="{{ $benchmark.ID }}"></tr>
{{ end }}
</tbody>
</table>
@@ -96,4 +92,6 @@
<p><a href="/test">Back</a></p>
</div>
<script src="/js/test.js"></script>
{{ template "footer" . }}

View File

@@ -2,7 +2,6 @@ package forms
type ResultForm struct {
Test int `form:"result_test" validate:"required"`
Hardware int `form:"result_hardware" validate:"required"`
Benchmark int `form:"result_benchmark" validate:"required"`
AverageScore float32 `form:"result_avg" validate:"required"`
MinimumScore float32 `form:"result_min"`

View File

@@ -58,4 +58,18 @@ func RegisterRoutes(f *flamego.Flame) {
f.Group("/result", func() {
f.Post("/add", binding.Form(forms.ResultForm{}), routes.ResultPostCreate)
})
// API v1 routes
f.Group("/api", func () {
f.Group("/v1", func() {
f.Group("/benchmark", func() {
f.Get("/details", routes.ApiV1BenchmarkDetails)
})
f.Group("/result", func() {
f.Post("/add", binding.Form(forms.ResultForm{}), routes.ApiV1ResultAdd)
f.Get("/list", routes.ApiV1ResultList)
})
})
})
}
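With the API group registered, the endpoints can be exercised directly. A hedged usage sketch: it assumes the app listens on flamego's default port 2830 and that a test with ID 1 and a benchmark with ID 2 already exist; the form field names mirror the form: tags on ResultForm:

package main

import (
	"fmt"
	"io"
	"net/http"
	"net/url"
)

func main() {
	base := "http://localhost:2830" // assumption: flamego's default listen address

	// POST a result; field names come from ResultForm's `form:` tags.
	form := url.Values{
		"result_test":      {"1"},
		"result_benchmark": {"2"},
		"result_avg":       {"1500.25"},
		"result_min":       {"1450"},
		"result_max":       {"1550.5"},
	}
	resp, err := http.PostForm(base+"/api/v1/result/add", form)
	if err != nil {
		panic(err)
	}
	body, _ := io.ReadAll(resp.Body)
	resp.Body.Close()
	fmt.Println(string(body)) // expect: "success"

	// Fetch the rows the test view's table is rebuilt from.
	list, err := http.Get(base + "/api/v1/result/list?test_id=1&benchmark_id=2")
	if err != nil {
		panic(err)
	}
	rows, _ := io.ReadAll(list.Body)
	list.Body.Close()
	fmt.Println(string(rows))
}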

web/routes/api_v1.go (new file, 62 lines)
View File

@@ -0,0 +1,62 @@
package routes

import (
	"log"

	"github.com/flamego/binding"
	"github.com/flamego/flamego"
	"github.com/flamego/validator"

	"git.metaunix.net/bitgoblin/blt/models"
	"git.metaunix.net/bitgoblin/blt/web/forms"
)

func ApiV1BenchmarkDetails(c flamego.Context, r flamego.Render) {
	// find benchmark ID from request
	benchmarkID := c.Query("benchmark_id")

	// find benchmark from DB
	var benchmark models.Benchmark
	models.DB.First(&benchmark, benchmarkID)

	// return JSON response
	r.JSON(200, benchmark)
}

func ApiV1ResultAdd(c flamego.Context, form forms.ResultForm, errs binding.Errors, r flamego.Render) {
	if len(errs) > 0 {
		var err error
		switch errs[0].Category {
		case binding.ErrorCategoryValidation:
			err = errs[0].Err.(validator.ValidationErrors)[0]
		default:
			err = errs[0].Err
		}
		log.Fatal(err)
	}

	result := models.Result{
		TestID:       form.Test,
		BenchmarkID:  form.Benchmark,
		AverageScore: form.AverageScore,
		MinimumScore: form.MinimumScore,
		MaximumScore: form.MaximumScore,
	}
	_ = models.DB.Create(&result)

	r.JSON(200, "success")
}

func ApiV1ResultList(c flamego.Context, r flamego.Render) {
	// find benchmark and test IDs from request
	benchmarkID := c.Query("benchmark_id")
	testID := c.Query("test_id")

	// find results from the DB that match the benchmark and test
	var results []models.Result
	models.DB.Where("test_id = ? AND benchmark_id = ?", testID, benchmarkID).Find(&results)

	// return JSON response
	r.JSON(200, results)
}

View File

@@ -26,7 +26,6 @@ func ResultPostCreate(c flamego.Context, form forms.ResultForm, errs binding.Err
result := models.Result{
TestID: form.Test,
HardwareID: form.Hardware,
BenchmarkID: form.Benchmark,
AverageScore: form.AverageScore,
MinimumScore: form.MinimumScore,