69 Commits

431d94660d Added ability to edit benchmark parameters
2025-10-04 12:23:06 -04:00
Gregory Ballantine
fd66449c28 Added ability to edit tests and hardware
2025-10-02 16:18:40 -04:00
Gregory Ballantine
cb55a19ada Switching from air to fresh for auto-reloading
2025-10-02 14:42:14 -04:00
Gregory Ballantine
352950467c Finishing the test/edit route
2025-10-02 11:28:56 -04:00
Gregory Ballantine
abc4abe80e Updating Woodpecker CI config 2025-10-02 11:25:14 -04:00
Gregory Ballantine
ff8acd493b [Issue #2] - Added .air.toml for using air with some sane defaults; added README to document how to use it (plus some info on the app) 2025-10-02 11:22:21 -04:00
Gregory Ballantine
4b98322022 Continued work on the test/edit post route
2025-08-12 11:40:56 -04:00
60d8554cf1 Updated the test edit page (still need to do the post page)
2025-08-10 01:44:33 -04:00
c19bb2108c Adding local tmp directory to git ignore
2025-07-28 08:00:21 -04:00
27cff3e79b Adding event: tag to the asset packaging step
2025-07-14 14:17:12 -04:00
03ef5cc905 Updating Grunt commands 2025-07-14 14:17:11 -04:00
f81879302d Started working on test edit page; removed nodemon because it wasn't working 2025-07-14 14:17:11 -04:00
9ecff3a25d Added nodemon to dev dependencies to automatically reload the app on changes 2025-07-14 14:17:10 -04:00
bef17b5a59 Fixed typo in Gruntfile; added Make tasks to compile frontend assets; Modernized the gathering of benchmark result data for the test view 2025-07-14 14:17:10 -04:00
82f42ecbed Improved some styles 2025-07-14 14:17:09 -04:00
673e2e9634 [Issue #8] - Licensing project under the BSD 2-Clause license (#10)
Adding license to project

Co-authored-by: Gregory Ballantine <gballantine@bitgoblin.tech>
Reviewed-on: #10
2025-07-14 14:17:07 -04:00
178894a360 3-add-gruntjs (#9)
Adding Grunt.js to compile SASS and CoffeeScript assets

Co-authored-by: Gregory Ballantine <gballantine555@gmail.com>
Reviewed-on: #9
2025-07-14 14:16:58 -04:00
5fc381c4b9 Updated some styles for tables and links 2025-07-14 13:53:50 -04:00
c9d2f7114a Added tests to hardware page 2025-07-14 13:53:50 -04:00
bce063c5f4 Updated some styles 2025-07-14 13:53:50 -04:00
b54d31213b Updated some styles 2025-07-14 13:53:50 -04:00
ae4198f3cc Fixed results table in test view 2025-07-14 13:53:50 -04:00
9fdf6e2b99 Fixed results table in test view 2025-07-14 13:53:50 -04:00
8f0cce7550 Fixed results table in test view 2025-07-14 13:53:50 -04:00
d97fee0911 Fixed results table in test view 2025-07-14 13:53:50 -04:00
de107761eb Added back reference associations for result; added table of results to test view 2025-07-14 13:53:50 -04:00
26ad420436 added missing import 2025-07-14 13:53:50 -04:00
47f3dcdbd1 Fixed result form stuff 2025-07-14 13:53:50 -04:00
6bb06205b0 Fixed float variable types 2025-07-14 13:53:50 -04:00
b508efd5a8 Added result creation route 2025-07-14 13:53:50 -04:00
e5a18f77f2 Added result model; added form to submit results 2025-07-14 13:53:50 -04:00
db41f0d6bd Added result model; added form to submit results 2025-07-14 13:53:50 -04:00
b93c8a5319 Added result model; added form to submit results 2025-07-14 13:53:50 -04:00
346b9e3911 Cleaned up runtime version 2025-07-14 13:53:50 -04:00
446590acb1 Added Go runtime version to the layout footer 2025-07-14 13:53:50 -04:00
a38ede5187 Added Go runtime version to the layout footer 2025-07-14 13:53:50 -04:00
2b3f24ee5c Fixed associating benchmarks to test 2025-07-14 13:53:50 -04:00
9104125de7 Fixed creating new test 2025-07-14 13:53:50 -04:00
5ad0810d81 Fixed creating new test 2025-07-14 13:53:50 -04:00
e27fd8d214 Added more information to the test view page 2025-07-14 13:53:50 -04:00
3910c4dc9c Fixed typo in test view route 2025-07-14 13:53:50 -04:00
3b8a30c7f8 Fixed typos in benchmark list view 2025-07-14 13:53:50 -04:00
293098e173 Fixed typos in model searches 2025-07-14 13:53:50 -04:00
10298cbf7c Fixed typo in benchmark list view 2025-07-14 13:53:50 -04:00
743a108add Fixed benchmark form submission error 2025-07-14 13:53:50 -04:00
c8a47b270a Fixed mix up between benchmark and hardware routes files 2025-07-14 13:53:50 -04:00
fde8d1dee4 Added benchmarks routes and views 2025-07-14 13:53:50 -04:00
bee64576bc Slight tweak to create test view 2025-07-14 13:53:50 -04:00
3c1d48a9ed Fixed form select field styling 2025-07-14 13:53:50 -04:00
633fc4ac02 Fixed variable capitalization in create test view 2025-07-14 13:53:50 -04:00
79924cd978 Added description field to Test model 2025-07-14 13:53:50 -04:00
e5443fb644 Passed hardware components and benchmarks to create test view 2025-07-14 13:53:50 -04:00
81a87e2761 Added route to view test 2025-07-14 13:53:50 -04:00
65550c983c Added route to view test 2025-07-14 13:53:50 -04:00
f3a2376c19 Added route to view test 2025-07-14 13:53:50 -04:00
99f45b3b3a Added route to view hardware component 2025-07-14 13:53:50 -04:00
4718aeaccd Added route to view hardware component 2025-07-14 13:53:50 -04:00
62654b9a3e Added route to view hardware component 2025-07-14 13:53:50 -04:00
93778274c2 Added hardware routes and views 2025-07-14 13:53:50 -04:00
7c13b1f209 Lots of changes 2025-07-14 13:53:50 -04:00
e09d399c98 Added ability to create projects 2025-07-14 13:53:50 -04:00
c9ad5df2ed Refactored models code to follow a better approach for opening and closing the database connection 2025-07-14 13:53:50 -04:00
0f89087134 Added models; started working on project routes 2025-07-14 13:53:50 -04:00
819baeb0c3 Added Gorm to project 2025-07-14 13:53:50 -04:00
fe303cb778 Copied some styles from Leviathan 2025-07-14 13:53:50 -04:00
98bd1f7eca Started working on the layout of the app 2025-07-14 13:53:50 -04:00
09a2667fc5 Updated Makefile to auto-define version 2025-07-14 13:53:50 -04:00
4020812549 Added templating, versioning, middleware, etc 2025-07-14 13:53:50 -04:00
8bc9849776 Better start for Flamego 2025-07-14 13:53:50 -04:00
17 changed files with 334 additions and 6 deletions

.gitignore (vendored): 1 changed line

@@ -3,6 +3,7 @@ blt
# Local data files
data/
tmp/
# Compiled assets
public/css/


@@ -1,4 +1,4 @@
-pipeline:
+steps:
   build:
     image: golang:1.22
     commands:

README.md (new file): 54 added lines

@@ -0,0 +1,54 @@
# Benchmark Logging Tool (BLT)

![Build badge](https://builds.metaunix.net/api/badges/87/status.svg)

Web-based tool to store and organize PC hardware benchmarks.

## Project Goals

The goals of this project are to:

* Record benchmarking results from multiple devices - e.g. log results from a laptop or a phone.
* Group results into tests to keep track of different testing configurations.
* Encourage running tests multiple times - running a benchmark several times improves accuracy.
* Create comparisons between hardware tests to evaluate relative performance.
* Generate graphs of hardware comparisons for use in videos and articles.

## Requirements

BLT runs on Go. It uses the built-in `go mod` tool to manage dependencies, so there is no external tooling to install in order to build or run BLT.

Debian/Ubuntu: `apt install -y golang`

RedHat and clones: `dnf install -y golang`

## Production Deployment

**TODO**

## Development

### Via Docker

**TODO**

### Local/Native Development

BLT uses [fresh](https://github.com/gravityblast/fresh) to auto-reload the app during development. While this is not strictly necessary, it makes development more convenient. If you prefer not to install it, you can simply build and run the app with the standard `go run main.go`.

1. Install dependencies:
   `go mod download`
2. Install fresh to auto-reload the app:
   `go install github.com/gravityblast/fresh@latest`
3. Run the app via fresh:
   `fresh`
4. If everything is running successfully, you can open your browser and go to http://localhost:2830.

## License

This project is available under the BSD 2-Clause license.


@@ -1,6 +1,8 @@
package models

import (
    "strconv"

    "gorm.io/gorm"
)
@@ -16,3 +18,7 @@ type Benchmark struct {
    // has many results
    Results []Result
}

func (b *Benchmark) StringID() string {
    return strconv.Itoa(int(b.ID))
}


@@ -1,6 +1,8 @@
package models

import (
    "strconv"

    "gorm.io/gorm"
)
@@ -19,3 +21,24 @@ type Test struct {
    // has many results
    Results []Result
}

func (t *Test) SelectedBenchmarks() []string {
    benchmarks := t.Benchmarks

    ids := make([]string, len(benchmarks))
    for i, b := range benchmarks {
        ids[i] = strconv.Itoa(int(b.ID))
    }

    return ids
}

func (t *Test) IsBenchmarkSelected(benchmarkID uint) bool {
    benchmarkUint := uint(benchmarkID)

    for _, b := range t.Benchmarks {
        if b.ID == benchmarkUint {
            return true
        }
    }

    return false
}
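
An aside on why these helpers return and compare strings: the edit form renders benchmark IDs as `<option>` values, and the template compares them against the pre-selected IDs, so the handler hands the template a `[]string` rather than raw `uint`s. A minimal, DB-free sketch of that behaviour, using trimmed copies of the models above and assuming `gorm.Model` supplies the `ID` field (the embedding itself is outside this hunk):

```go
package main

import (
    "fmt"
    "strconv"

    "gorm.io/gorm"
)

// Trimmed copies of the models in this diff; only what the helper touches.
type Benchmark struct {
    gorm.Model
    Name string
}

type Test struct {
    gorm.Model
    Benchmarks []Benchmark
}

// Same logic as Test.SelectedBenchmarks above: uint IDs become strings.
func (t *Test) SelectedBenchmarks() []string {
    ids := make([]string, len(t.Benchmarks))
    for i, b := range t.Benchmarks {
        ids[i] = strconv.Itoa(int(b.ID))
    }
    return ids
}

func main() {
    test := Test{Benchmarks: []Benchmark{
        {Model: gorm.Model{ID: 3}, Name: "Unigine Heaven"},
        {Model: gorm.Model{ID: 7}, Name: "Example benchmark"},
    }}

    // The handler stores this slice in the template data as
    // "selectedBenchmarks"; the edit form compares each option's StringID
    // against it to decide which <option> gets the selected attribute.
    fmt.Println(test.SelectedBenchmarks()) // prints [3 7]
}
```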

runner.conf (new file): 14 added lines

@@ -0,0 +1,14 @@
root: .
tmp_path: ./tmp
build_name: runner-build
build_log: runner-build-errors.log
valid_ext: .go, .tpl, .tmpl, .html
no_rebuild_ext: .tpl, .tmpl, .html
ignored: assets, tmp, node_modules, data, vendor
build_delay: 600
colors: 1
log_color_main: cyan
log_color_build: yellow
log_color_runner: green
log_color_watcher: magenta
log_color_app:


@@ -0,0 +1,38 @@
{{ template "header" . }}

<div class="row">
  <h2>Editing Benchmark: {{ .benchmark.Name }}</h2>

  <form class="twelve columns" action="/benchmark/{{ .benchmark.ID }}/edit" method="POST">
    <div class="row">
      <div class="nine columns">
        <label for="benchmark_name">
          Benchmark name:
          <input id="benchmark_name" class="u-full-width" type="text" name="benchmark_name" placeholder="Unigine Heaven" value="{{ .benchmark.Name }}">
        </label>
      </div>
      <div class="three columns">
        <label for="benchmark_scoring">
          Benchmark type:
          <select id="benchmark_scoring" class="u-full-width" name="benchmark_scoring">
            <option value="fps" {{ if eq .benchmark.ScoringType "fps" }}selected{{ end }}>Frames per second</option>
            <option value="ms" {{ if eq .benchmark.ScoringType "ms" }}selected{{ end }}>Frame time</option>
            <option value="pts" {{ if eq .benchmark.ScoringType "pts" }}selected{{ end }}>Total points</option>
          </select>
        </label>
      </div>
    </div>

    <div class="row">
      <label for="benchmark_description">
        Benchmark description:
        <textarea id="benchmark_description" class="twelve columns" cols="30" rows="10" name="benchmark_description">{{ .benchmark.Description }}</textarea>
      </label>
    </div>

    <input class="button-primary u-full-width" type="submit" value="Submit">
  </form>
</div>

{{ template "footer" . }}


@@ -3,8 +3,12 @@
<div class="row">
<h2>{{ .benchmark.Name }}</h2>
<span><a href="/benchmark/{{ .benchmark.ID }}/edit">Edit</a></span>
<p>{{ .benchmark.ScoringType }}</p>
<p>{{ .benchmark.Description }}</p>
<hr>
<h4>Latest Benchmark Results:</h4>


@@ -0,0 +1,34 @@
{{ template "header" . }}

<div class="row">
  <h2>Add new hardware</h2>

  <form class="twelve columns" action="/hardware/{{ .hardware.ID }}/edit" method="POST">
    <div class="row">
      <div class="nine columns">
        <label for="hardware_name">
          Hardware name:
          <input id="hardware_name" class="u-full-width" type="text" name="hardware_name" placeholder="EVGA RTX 3080 Ti" value="{{ .hardware.Name }}">
        </label>
      </div>
      <div class="three columns">
        <label for="hardware_type">
          Hardware type:
          <select id="hardware_type" class="u-full-width" name="hardware_type">
            <option value="cpu" {{ if eq .hardware.Type "cpu" }}selected{{ end }}>Processor</option>
            <option value="mem" {{ if eq .hardware.Type "mem" }}selected{{ end }}>Memory</option>
            <option value="gpu" {{ if eq .hardware.Type "gpu" }}selected{{ end }}>Graphics Card</option>
            <option value="ssd" {{ if eq .hardware.Type "ssd" }}selected{{ end }}>Solid State Drive</option>
            <option value="hdd" {{ if eq .hardware.Type "hdd" }}selected{{ end }}>Hard Drive</option>
            <option value="nic" {{ if eq .hardware.Type "nic" }}selected{{ end }}>Network Card</option>
          </select>
        </label>
      </div>
    </div>

    <input class="button-primary u-full-width" type="submit" value="Submit">
  </form>
</div>

{{ template "footer" . }}


@@ -3,6 +3,8 @@
<div class="row">
<h2>{{ .hardware.Name }}</h2>
<span><a href="/hardware/{{ .hardware.ID }}/edit">Edit</a></span>
<p>{{ .hardware.Type }}</p>
<hr>


@@ -30,9 +30,9 @@
          <label for="test_benchmarks">
            Benchmarks to Test:
            <select id="test_benchmarks" class="u-full-width" name="test_benchmarks" multiple>
-             {{ $testBenchmarks := .test.Benchmarks }}
+             {{ $selectedBenchmarks := .selectedBenchmarks }}
              {{ range $bm := .benchmarks }}
-             <option value="{{ $bm.ID }}" {{ if contains $testBenchmarks $bm.ID }}selected{{ end }}>{{ $bm.Name }}</option>
+             <option value="{{ $bm.ID }}" {{ if contains $selectedBenchmarks $bm.StringID }}selected{{ end }}>{{ $bm.Name }}</option>
              {{ end }}
            </select>
          </label>
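
The `contains` call above relies on a custom template function whose registration is not part of this diff. A minimal sketch of what such a helper could look like, wired up through the standard `html/template` FuncMap mechanism; the function body and the standalone rendering below are illustrative assumptions, not the project's actual wiring:

```go
package main

import (
    "html/template"
    "os"
)

// contains reports whether needle is one of the values in haystack. A helper
// with this shape would let the template write
// {{ if contains $selectedBenchmarks $bm.StringID }}selected{{ end }}.
func contains(haystack []string, needle string) bool {
    for _, v := range haystack {
        if v == needle {
            return true
        }
    }
    return false
}

func main() {
    // Hypothetical standalone rendering; in BLT the templates are rendered
    // through the Flamego template middleware instead.
    tmpl := template.Must(template.New("opt").Funcs(template.FuncMap{
        "contains": contains,
    }).Parse(`<option value="{{ .ID }}" {{ if contains .Selected .ID }}selected{{ end }}>{{ .Name }}</option>`))

    data := struct {
        ID, Name string
        Selected []string
    }{ID: "3", Name: "Unigine Heaven", Selected: []string{"1", "3"}}

    if err := tmpl.Execute(os.Stdout, data); err != nil {
        panic(err)
    }
    // prints: <option value="3" selected>Unigine Heaven</option>
}
```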


@@ -3,11 +3,15 @@
<div class="row">
<h2>{{ .test.Name }}</h2>
<p>Hardware tested: <a href="/hardware/{{ .test.Hardware.ID }}">{{ .test.Hardware.Name }}</a></p>
<span><a href="/test/{{ .test.ID }}/edit">Edit</a></span>
<p>{{ .test.Description }}</p>
<h4>Benchmarks used:</h4>
<h4>Test Info:</h4>
<p>Hardware tested: <a href="/hardware/{{ .test.Hardware.ID }}">{{ .test.Hardware.Name }}</a></p>
<p>Benchmarks used:</p>
<ul>
{{ range $bm := .test.Benchmarks }}


@@ -4,5 +4,15 @@ type TestForm struct {
    Name string `form:"test_name" validate:"required"`
    Description string `form:"test_description"`
    Hardware int `form:"test_hardware" validate:"required"`
-   Benchmarks []string `form:"test_benchmarks" validate:"required"`
+   Benchmarks []uint `form:"test_benchmarks" validate:"required"`
}

func (t *TestForm) IsBenchmarkSelected(checkID uint) bool {
    for _, selectedID := range t.Benchmarks {
        if checkID == selectedID {
            return true
        }
    }
    return false
}
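
With `Benchmarks` now bound as `[]uint`, the multi-select submits one `test_benchmarks` value per selected option, and the binding layer presumably collects the repeated values into the slice. A quick, hypothetical way to exercise the edit endpoint by hand from Go (the host, test ID, and field values below are made-up; port 2830 is the one the README points at):

```go
package main

import (
    "fmt"
    "net/http"
    "net/url"
)

func main() {
    // Hypothetical local instance and test ID, purely for illustration.
    form := url.Values{}
    form.Set("test_name", "RTX 3080 Ti retest")
    form.Set("test_description", "Re-run after a driver update")
    form.Set("test_hardware", "1")
    // Repeated keys become multiple values for the same field, which is how
    // a <select multiple> submits and how a []uint form field gets filled.
    form.Add("test_benchmarks", "3")
    form.Add("test_benchmarks", "7")

    resp, err := http.PostForm("http://localhost:2830/test/1/edit", form)
    if err != nil {
        fmt.Println("request failed:", err)
        return
    }
    defer resp.Body.Close()
    fmt.Println("status:", resp.Status)
}
```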


@@ -24,6 +24,9 @@ func RegisterRoutes(f *flamego.Flame) {
        f.Post("/create", binding.Form(forms.HardwareForm{}), routes.HardwarePostCreate)
        f.Get("/{hardware_id}", routes.HardwareGetView)
        f.Get("/{hardware_id}/edit", routes.HardwareGetEdit)
        f.Post("/{hardware_id}/edit", binding.Form(forms.HardwareForm{}), routes.HardwarePostEdit)
    })

    // benchmark routes
@@ -38,6 +41,9 @@ func RegisterRoutes(f *flamego.Flame) {
        f.Post("/create", binding.Form(forms.BenchmarkForm{}), routes.BenchmarkPostCreate)
        f.Get("/{benchmark_id}", routes.BenchmarkGetView)
        f.Get("/{benchmark_id}/edit", routes.BenchmarkGetEdit)
        f.Post("/{benchmark_id}/edit", binding.Form(forms.BenchmarkForm{}), routes.BenchmarkPostEdit)
    })

    // test routes
@@ -54,6 +60,7 @@ func RegisterRoutes(f *flamego.Flame) {
        f.Group("/{test_id}", func() {
            f.Get("", routes.TestGetView)
            f.Get("/edit", routes.TestGetEdit)
            f.Post("/edit", binding.Form(forms.TestForm{}), routes.TestPostEdit)
        })
    })
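
For context, a minimal entry point that these registrations would plug into might look roughly like the sketch below. `flamego.Classic()` and `Run()` are Flamego's own API; the import path and package name for `RegisterRoutes` are guesses, since `main.go` is not part of this diff:

```go
package main

import (
    "github.com/flamego/flamego"

    "bitgoblin.tech/blt/web" // hypothetical import path for the package defining RegisterRoutes
)

func main() {
    // flamego.Classic ships with the default middleware and listens on
    // port 2830 by default, which matches the URL in the README.
    f := flamego.Classic()

    // Register the hardware, benchmark, and test routes shown in this diff.
    // The real main.go presumably also installs the template middleware
    // that the handlers below depend on.
    web.RegisterRoutes(f)

    f.Run()
}
```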


@@ -64,3 +64,44 @@ func BenchmarkPostCreate(c flamego.Context, form forms.BenchmarkForm, errs bindi
    c.Redirect(fmt.Sprintf("/benchmark/%d", benchmark.ID))
}

func BenchmarkGetEdit(c flamego.Context, t template.Template, data template.Data) {
    // find benchmark ID from request
    benchmarkID := c.Param("benchmark_id")

    // find benchmark from DB
    var benchmark models.Benchmark
    models.DB.First(&benchmark, benchmarkID)

    data["benchmark"] = benchmark
    data["title"] = "Editing Benchmark"

    t.HTML(http.StatusOK, "benchmark/edit")
}

func BenchmarkPostEdit(c flamego.Context, form forms.BenchmarkForm, errs binding.Errors) {
    if len(errs) > 0 {
        var err error
        switch errs[0].Category {
        case binding.ErrorCategoryValidation:
            err = errs[0].Err.(validator.ValidationErrors)[0]
        default:
            err = errs[0].Err
        }
        log.Fatal(err)
    }

    // find benchmark ID from request
    benchmarkID := c.Param("benchmark_id")

    // find benchmark from DB
    var benchmark models.Benchmark
    models.DB.First(&benchmark, benchmarkID)

    benchmark.Name = form.Name
    benchmark.ScoringType = form.ScoringType
    benchmark.Description = form.Description

    models.DB.Save(&benchmark)

    c.Redirect(fmt.Sprintf("/benchmark/%d", benchmark.ID))
}


@@ -63,3 +63,43 @@ func HardwarePostCreate(c flamego.Context, form forms.HardwareForm, errs binding
    c.Redirect(fmt.Sprintf("/hardware/%d", hardware.ID))
}

func HardwareGetEdit(c flamego.Context, t template.Template, data template.Data) {
    // find hardware ID from request
    hardwareID := c.Param("hardware_id")

    // find hardware from DB
    var hardware models.Hardware
    models.DB.Preload("Tests.Benchmarks").First(&hardware, hardwareID)

    data["hardware"] = hardware
    data["title"] = "Edit Hardware"

    t.HTML(http.StatusOK, "hardware/edit")
}

func HardwarePostEdit(c flamego.Context, form forms.HardwareForm, errs binding.Errors) {
    if len(errs) > 0 {
        var err error
        switch errs[0].Category {
        case binding.ErrorCategoryValidation:
            err = errs[0].Err.(validator.ValidationErrors)[0]
        default:
            err = errs[0].Err
        }
        log.Fatal(err)
    }

    // find hardware ID from request
    hardwareID := c.Param("hardware_id")

    // find hardware from DB
    var hardware models.Hardware
    models.DB.Preload("Tests.Benchmarks").First(&hardware, hardwareID)

    hardware.Name = form.Name
    hardware.Type = form.Type

    models.DB.Save(&hardware)

    c.Redirect(fmt.Sprintf("/hardware/%d", hardware.ID))
}


@@ -99,6 +99,56 @@ func TestGetEdit(c flamego.Context, t template.Template, data template.Data) {
    models.DB.Find(&benchmarks)
    data["benchmarks"] = benchmarks

    // determine which benchmarks are selected in a test
    selectedBenchmarks := test.SelectedBenchmarks()
    data["selectedBenchmarks"] = selectedBenchmarks

    data["title"] = fmt.Sprintf("Editing Test: %s", test.Name)

    t.HTML(http.StatusOK, "test/edit")
}

func TestPostEdit(c flamego.Context, form forms.TestForm, errs binding.Errors) {
    if len(errs) > 0 {
        var err error
        switch errs[0].Category {
        case binding.ErrorCategoryValidation:
            err = errs[0].Err.(validator.ValidationErrors)[0]
        default:
            err = errs[0].Err
        }
        log.Fatal(err)
    }

    // find test ID from request
    testID := c.Param("test_id")

    // find test from DB
    var test models.Test
    models.DB.Preload("Hardware").Preload("Benchmarks").First(&test, testID)

    test.Name = form.Name
    test.Description = form.Description
    test.HardwareID = form.Hardware

    // bind benchmarks to the test that aren't already associated
    for _, b := range form.Benchmarks {
        if !test.IsBenchmarkSelected(b) {
            var benchmark models.Benchmark
            models.DB.First(&benchmark, b) // find benchmark
            models.DB.Model(&test).Association("Benchmarks").Append(&benchmark)
        }
    }

    // remove associated benchmarks that weren't in the form
    for _, b := range test.Benchmarks {
        if !form.IsBenchmarkSelected(b.ID) {
            var benchmark models.Benchmark
            models.DB.First(&benchmark, b) // find benchmark
            models.DB.Model(&test).Association("Benchmarks").Delete(&benchmark)
        }
    }

    models.DB.Save(&test)

    c.Redirect(fmt.Sprintf("/test/%d", test.ID))
}
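
The two loops above act as a small reconciliation step: append form-selected benchmarks the test does not have yet, then drop associated benchmarks the form no longer lists. A DB-free sketch of just that decision logic, using trimmed copies of the types from this diff (again assuming `gorm.Model` supplies `ID`):

```go
package main

import (
    "fmt"

    "gorm.io/gorm"
)

// Trimmed copies of the diff's types; only the fields the loops touch.
type Benchmark struct{ gorm.Model }

type Test struct{ Benchmarks []Benchmark }

func (t *Test) IsBenchmarkSelected(id uint) bool {
    for _, b := range t.Benchmarks {
        if b.ID == id {
            return true
        }
    }
    return false
}

type TestForm struct{ Benchmarks []uint }

func (f *TestForm) IsBenchmarkSelected(id uint) bool {
    for _, selected := range f.Benchmarks {
        if selected == id {
            return true
        }
    }
    return false
}

func main() {
    test := Test{Benchmarks: []Benchmark{{gorm.Model{ID: 1}}, {gorm.Model{ID: 2}}}}
    form := TestForm{Benchmarks: []uint{2, 3}}

    for _, id := range form.Benchmarks {
        if !test.IsBenchmarkSelected(id) {
            fmt.Println("append benchmark", id) // 3: newly selected in the form
        }
    }
    for _, b := range test.Benchmarks {
        if !form.IsBenchmarkSelected(b.ID) {
            fmt.Println("remove benchmark", b.ID) // 1: no longer selected
        }
    }
}
```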