8 Commits

Author SHA1 Message Date
7c6977f4a6 Adding event: tag to the asset packaging step
2025-07-14 13:53:50 -04:00
fcee38d33b Updating Grunt commands 2025-07-14 13:53:50 -04:00
0cc7113a8b Started working on test edit page; removed nodemon because it wasn't working 2025-07-14 13:53:50 -04:00
107a77d6f8 Added nodemon to dev dependencies to automatically reload the app on changes 2025-07-14 13:53:50 -04:00
1a3048b5f2 Fixed typo in Gruntfile; added Make tasks to compile frontend assets; Modernized the gathering of benchmark result data for the test view 2025-07-14 13:53:50 -04:00
4b9065ca4b Improved some styles 2025-07-14 13:53:50 -04:00
acdac071ed [Issue #8] - Licensing project under the BSD 2-Clause license (#10)
Adding license to project

Co-authored-by: Gregory Ballantine <gballantine@bitgoblin.tech>
Reviewed-on: #10
2025-07-14 13:53:50 -04:00
90472e443c 3-add-gruntjs (#9)
Adding Grunt.js to compile SASS and CoffeeScript assets

Co-authored-by: Gregory Ballantine <gregory.w.ballantine@nasa.gov>
Co-authored-by: Gregory Ballantine <gballantine555@gmail.com>
Reviewed-on: #9
2025-07-14 13:53:50 -04:00
24 changed files with 32 additions and 510 deletions

.gitignore

@@ -3,7 +3,6 @@ blt
# Local data files
data/
tmp/
# Compiled assets
public/css/


@@ -1,4 +1,4 @@
steps:
pipeline:
build:
image: golang:1.22
commands:


@@ -1,54 +0,0 @@
# Benchmark Logging Tool (BLT)
![Build badge](https://builds.metaunix.net/api/badges/87/status.svg)
Web-based tool to store and organize PC hardware benchmarks.
## Project Goals
The goals of this project are to:
* Record benchmarking results from multiple devices - e.g. log from a laptop or a phone.
* Group results into tests to keep track of different testing configurations.
* Encourage running tests multiple times - it's good practice to run a benchmark multiple times for accuracy.
* Create comparisons of hardware tests to compare performance.
* Generate graphs of hardware comparisons for usage in videos and articles.
## Requirements
BLT runs on Go. It uses the built-in `go mod` tool to manage dependencies, so there is no external tooling to install in order to build or run BLT.
Debian/Ubuntu: `apt install -y golang`
RedHat and clones: `dnf install -y golang`
## Production Deployment
**TODO**
## Development
### Via Docker
**TODO**
### Local/Native Development
BLT uses [fresh](https://github.com/gravityblast/fresh) to auto-reload the app. While this is not strictly necessary, it is used to make development more convenient. If you wish to forgo installing it, you may simply build and run the app with the standard `go run main.go`.
1. Install dependencies:
`go mod download`
2. Install fresh to auto-reload the app:
`go install github.com/gravityblast/fresh@latest`
3. Run the app via fresh:
`fresh`
4. If everything is running successfully you can open your browser and go to http://localhost:2830.
## License
This project is available under the BSD 2-Clause license.


@@ -1,27 +0,0 @@
package models
import (
"strconv"
"gorm.io/gorm"
)
type BenchmarkProfile struct {
gorm.Model
Label string
Settings string
// belongs to Benchmark
BenchmarkID int
Benchmark Benchmark
// many-to-many with tests
Tests []Test `gorm:"many2many:tests_benchmark_profiles;"`
// has many results
Results []Result
}
func (b *BenchmarkProfile) StringID() string {
return strconv.Itoa(int(b.ID))
}


@@ -1,8 +1,6 @@
package models
import (
"strconv"
"gorm.io/gorm"
)
@@ -12,10 +10,9 @@ type Benchmark struct {
ScoringType string
Description string
// one-to-many BenchmarkProfiles
BenchmarkProfiles []BenchmarkProfile
}
// many-to-many test
Tests []Test `gorm:"many2many:tests_benchmarks;"`
func (b *Benchmark) StringID() string {
return strconv.Itoa(int(b.ID))
// has many results
Results []Result
}


@@ -11,8 +11,8 @@ type Result struct {
MaximumScore float32
// belongs to Benchmark
BenchmarkProfileID int
BenchmarkProfile BenchmarkProfile
BenchmarkID int
Benchmark Benchmark
// belongs to Test
TestID int


@@ -1,8 +1,6 @@
package models
import (
"strconv"
"gorm.io/gorm"
)
@@ -15,30 +13,9 @@ type Test struct {
HardwareID int
Hardware Hardware
// many-to-many benchmark profiles
BenchmarkProfiles []Benchmark `gorm:"many2many:tests_benchmark_profiles;"`
// many-to-many benchmarks
Benchmarks []Benchmark `gorm:"many2many:tests_benchmarks;"`
// has many results
Results []Result
}
func (t *Test) SelectedBenchmarks() []string {
benchmarks := t.BenchmarkProfiles
ids := make([]string, len(benchmarks))
for i, b := range benchmarks {
ids[i] = strconv.Itoa(int(b.ID))
}
return ids
}
func (t *Test) IsBenchmarkSelected(benchmarkID uint) bool {
benchmarkUint := uint(benchmarkID)
for _, b := range t.BenchmarkProfiles {
if b.ID == benchmarkUint {
return true
}
}
return false
}
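
The model hunks above remove the BenchmarkProfile layer: Test and Benchmark are now linked directly through a `tests_benchmarks` join table, and Result belongs to a Benchmark rather than a profile. A minimal, self-contained sketch of how these GORM associations fit together, assuming an in-memory SQLite database and simplified model fields (the real definitions live in the project's `models` package):

```go
package main

import (
	"fmt"

	"gorm.io/driver/sqlite"
	"gorm.io/gorm"
)

// Simplified versions of the repo's models, trimmed to the
// association fields relevant to this change.
type Benchmark struct {
	gorm.Model
	Name  string
	Tests []Test `gorm:"many2many:tests_benchmarks;"`
}

type Test struct {
	gorm.Model
	Name       string
	Benchmarks []Benchmark `gorm:"many2many:tests_benchmarks;"`
	Results    []Result
}

type Result struct {
	gorm.Model
	TestID       int
	Test         Test
	BenchmarkID  int
	Benchmark    Benchmark
	AverageScore float32
}

func main() {
	db, err := gorm.Open(sqlite.Open("file::memory:?cache=shared"), &gorm.Config{})
	if err != nil {
		panic(err)
	}
	_ = db.AutoMigrate(&Benchmark{}, &Test{}, &Result{})

	bm := Benchmark{Name: "Unigine Heaven"}
	db.Create(&bm)

	test := Test{Name: "RTX 3080 Ti run"}
	db.Create(&test)

	// Attach the benchmark through the many-to-many join table.
	db.Model(&test).Association("Benchmarks").Append(&bm)

	// A result now points at the benchmark directly, not at a settings profile.
	db.Create(&Result{TestID: int(test.ID), BenchmarkID: int(bm.ID), AverageScore: 143.2})

	var loaded Test
	db.Preload("Benchmarks").Preload("Results").First(&loaded, test.ID)
	fmt.Println(len(loaded.Benchmarks), len(loaded.Results))
}
```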


@@ -1,14 +0,0 @@
root: .
tmp_path: ./tmp
build_name: runner-build
build_log: runner-build-errors.log
valid_ext: .go, .tpl, .tmpl, .html
no_rebuild_ext: .tpl, .tmpl, .html
ignored: assets, tmp, node_modules, data, vendor
build_delay: 600
colors: 1
log_color_main: cyan
log_color_build: yellow
log_color_runner: green
log_color_watcher: magenta
log_color_app:


@@ -1,38 +0,0 @@
{{ template "header" . }}
<div class="row">
<h2>Editing Benchmark: {{ .benchmark.Name }}</h2>
<form class="twelve columns" action="/benchmark/{{ .benchmark.ID }}/edit" method="POST">
<div class="row">
<div class="nine columns">
<label for="benchmark_name">
Benchmark name:
<input id="benchmark_name" class="u-full-width" type="text" name="benchmark_name" placeholder="Unigine Heaven" value="{{ .benchmark.Name }}">
</label>
</div>
<div class="three columns">
<label for="benchmark_scoring">
Benchmark type:
<select id="benchmark_scoring" class="u-full-width" name="benchmark_scoring">
<option value="fps" {{ if eq .benchmark.ScoringType "fps" }}selected{{ end }}>Frames per second</option>
<option value="ms" {{ if eq .benchmark.ScoringType "ms" }}selected{{ end }}>Frame time</option>
<option value="pts" {{ if eq .benchmark.ScoringType "pts" }}selected{{ end }}>Total points</option>
</select>
</label>
</div>
</div>
<div class="row">
<label for="benchmark_description">
Benchmark description:
<textarea id="benchmark_description" class="twelve columns" cols="30" rows="10" name="benchmark_description">{{ .benchmark.Description }}</textarea>
</label>
</div>
<input class="button-primary u-full-width" type="submit" value="Submit">
</form>
</div>
{{ template "footer" . }}


@@ -1,47 +0,0 @@
{{ template "header" . }}
<div class="row">
<h2>Add benchmark settings profile</h2>
<form class="twelve columns" action="/benchmark/profile/add" method="POST">
<div class="row">
<div class="six columns">
<label for="benchmark_profile_benchmark">
Benchmark:
{{ if .benchmark }}
<select id="benchmark_profile_benchmark" class="u-full-width" disabled>
<option value="{{ .benchmark.ID }}">{{ .benchmark.Name }}</option>
</select>
<input type="hidden" name="benchmark_profile_benchmark" value="{{ .benchmark.ID }}">
{{ end }}
{{ if .benchmarks }}
<select id="benchmark_profile_benchmark" class="u-full-width" name="benchmark_profile_benchmark">
{{ range $benchmark := .benchmarks }}
<option value="{{ $benchmark.ID }}">{{ $benchmark.Name }}</option>
{{ end }}
</select>
{{ end }}
</label>
</div>
<div class="six columns">
<label for="benchmark_profile_label">
Profile label:
<input id="benchmark_profile_label" class="u-full-width" type="text" name="benchmark_profile_label" placeholder="My benchmark settings profile">
</label>
</div>
</div>
<div class="row">
<label for="benchmark_description">
Benchmark description:
<textarea id="benchmark_description" class="twelve columns" cols="30" rows="10" name="benchmark_description"></textarea>
</label>
</div>
<input class="button-primary u-full-width" type="submit" value="Submit">
</form>
</div>
{{ template "footer" . }}


@@ -3,30 +3,8 @@
<div class="row">
<h2>{{ .benchmark.Name }}</h2>
<span><a href="/benchmark/{{ .benchmark.ID }}/edit">Edit</a></span>
<p>{{ .benchmark.ScoringType }}</p>
<p>{{ .benchmark.Description }}</p>
<hr>
<h4>Profiles for this Benchmark</h4>
<p><a href="/benchmark/profile/add?benchmark={{ .benchmark.ID }}">Add settings profile</a></p>
<ul class="benchmark-profiles">
{{ range $profile := .benchmark.BenchmarkProfiles }}
<li>
{{ $profile.Label }}
<span>
<a href="/benchmark/profile/{{ $profile.ID }}/edit">Edit</a>
<a href="/benchmark/profile/{{ $profile.ID }}/delete">Delete</a>
</span>
</li>
{{ end }}
</ul>
<hr>
<h4>Latest Benchmark Results:</h4>


@@ -1,34 +0,0 @@
{{ template "header" . }}
<div class="row">
<h2>Add new hardware</h2>
<form class="twelve columns" action="/hardware/{{ .hardware.ID }}/edit" method="POST">
<div class="row">
<div class="nine columns">
<label for="hardware_name">
Hardware name:
<input id="hardware_name" class="u-full-width" type="text" name="hardware_name" placeholder="EVGA RTX 3080 Ti" value="{{ .hardware.Name }}">
</label>
</div>
<div class="three columns">
<label for="hardware_type">
Hardware type:
<select id="hardware_type" class="u-full-width" name="hardware_type">
<option value="cpu" {{ if eq .hardware.Type "cpu" }}selected{{ end }}>Processor</option>
<option value="mem" {{ if eq .hardware.Type "mem" }}selected{{ end }}>Memory</option>
<option value="gpu" {{ if eq .hardware.Type "gpu" }}selected{{ end }}>Graphics Card</option>
<option value="ssd" {{ if eq .hardware.Type "ssd" }}selected{{ end }}>Solid State Drive</option>
<option value="hdd" {{ if eq .hardware.Type "hdd" }}selected{{ end }}>Hard Drive</option>
<option value="nic" {{ if eq .hardware.Type "nic" }}selected{{ end }}>Network Card</option>
</select>
</label>
</div>
</div>
<input class="button-primary u-full-width" type="submit" value="Submit">
</form>
</div>
{{ template "footer" . }}


@@ -3,8 +3,6 @@
<div class="row">
<h2>{{ .hardware.Name }}</h2>
<span><a href="/hardware/{{ .hardware.ID }}/edit">Edit</a></span>
<p>{{ .hardware.Type }}</p>
<hr>


@@ -30,9 +30,9 @@
<label for="test_benchmarks">
Benchmarks to Test:
<select id="test_benchmarks" class="u-full-width" name="test_benchmarks" multiple>
{{ $selectedBenchmarks := .selectedBenchmarks }}
{{ $testBenchmarks := .test.Benchmarks }}
{{ range $bm := .benchmarks }}
<option value="{{ $bm.ID }}" {{ if contains $selectedBenchmarks $bm.StringID }}selected{{ end }}>{{ $bm.Name }}</option>
<option value="{{ $bm.ID }}" {{ if contains $testBenchmarks $bm.ID }}selected{{ end }}>{{ $bm.Name }}</option>
{{ end }}
</select>
</label>
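
The edit template now checks membership with `contains $testBenchmarks $bm.ID` against the preloaded `Benchmarks` slice, replacing the removed `SelectedBenchmarks()` string-ID comparison. The `contains` helper itself is not part of this diff; a hedged sketch of how such a template function might be registered, assuming it simply reports whether a benchmark with the given ID is in the slice:

```go
package main

import (
	"html/template"
	"os"
)

type Benchmark struct {
	ID   uint
	Name string
}

// contains is a hypothetical helper matching the template usage above:
// it reports whether any benchmark in the slice has the given ID.
func contains(benchmarks []Benchmark, id uint) bool {
	for _, b := range benchmarks {
		if b.ID == id {
			return true
		}
	}
	return false
}

func main() {
	tpl := template.Must(template.New("select").Funcs(template.FuncMap{
		"contains": contains,
	}).Parse(`{{ range .All }}<option {{ if contains $.Selected .ID }}selected{{ end }}>{{ .Name }}</option>{{ end }}`))

	data := map[string]interface{}{
		"All":      []Benchmark{{ID: 1, Name: "Heaven"}, {ID: 2, Name: "Superposition"}},
		"Selected": []Benchmark{{ID: 2, Name: "Superposition"}},
	}
	_ = tpl.Execute(os.Stdout, data)
}
```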


@@ -3,15 +3,11 @@
<div class="row">
<h2>{{ .test.Name }}</h2>
<span><a href="/test/{{ .test.ID }}/edit">Edit</a></span>
<p>Hardware tested: <a href="/hardware/{{ .test.Hardware.ID }}">{{ .test.Hardware.Name }}</a></p>
<p>{{ .test.Description }}</p>
<h4>Test Info:</h4>
<p>Hardware tested: <a href="/hardware/{{ .test.Hardware.ID }}">{{ .test.Hardware.Name }}</a></p>
<p>Benchmarks used:</p>
<h4>Benchmarks used:</h4>
<ul>
{{ range $bm := .test.Benchmarks }}


@@ -1,7 +0,0 @@
package forms
type BenchmarkProfileForm struct {
BenchmarkID int `form:"benchmark_profile_benchmark" validate:"required"`
Label string `form:"benchmark_profile_label" validate:"required"`
Settings string `form:"benchmark_profile_settings" validate:"required"`
}


@@ -1,9 +1,9 @@
package forms
type ResultForm struct {
Test int `form:"result_test" validate:"required"`
BenchmarkProfile int `form:"result_benchmark" validate:"required"`
AverageScore float32 `form:"result_avg" validate:"required"`
MinimumScore float32 `form:"result_min"`
MaximumScore float32 `form:"result_max"`
Test int `form:"result_test" validate:"required"`
Benchmark int `form:"result_benchmark" validate:"required"`
AverageScore float32 `form:"result_avg" validate:"required"`
MinimumScore float32 `form:"result_min"`
MaximumScore float32 `form:"result_max"`
}


@@ -1,18 +1,8 @@
package forms
type TestForm struct {
Name string `form:"test_name" validate:"required"`
Description string `form:"test_description"`
Hardware int `form:"test_hardware" validate:"required"`
BenchmarkProfiles []uint `form:"test_benchmarks" validate:"required"`
}
func (t *TestForm) IsBenchmarkSelected(checkID uint) bool {
for _, selectedID := range t.BenchmarkProfiles {
if checkID == selectedID {
return true
}
}
return false
Name string `form:"test_name" validate:"required"`
Description string `form:"test_description"`
Hardware int `form:"test_hardware" validate:"required"`
Benchmarks []string `form:"test_benchmarks" validate:"required"`
}


@@ -24,9 +24,6 @@ func RegisterRoutes(f *flamego.Flame) {
f.Post("/create", binding.Form(forms.HardwareForm{}), routes.HardwarePostCreate)
f.Get("/{hardware_id}", routes.HardwareGetView)
f.Get("/{hardware_id}/edit", routes.HardwareGetEdit)
f.Post("/{hardware_id}/edit", binding.Form(forms.HardwareForm{}), routes.HardwarePostEdit)
})
// benchmark routes
@@ -41,14 +38,6 @@ func RegisterRoutes(f *flamego.Flame) {
f.Post("/create", binding.Form(forms.BenchmarkForm{}), routes.BenchmarkPostCreate)
f.Get("/{benchmark_id}", routes.BenchmarkGetView)
f.Get("/{benchmark_id}/edit", routes.BenchmarkGetEdit)
f.Post("/{benchmark_id}/edit", binding.Form(forms.BenchmarkForm{}), routes.BenchmarkPostEdit)
f.Group("/profile", func() {
f.Get("/add", routes.BenchmarkGetProfileAdd)
f.Post("/add", binding.Form(forms.BenchmarkProfileForm{}), routes.BenchmarkPostProfileAdd)
})
})
// test routes
@@ -65,7 +54,6 @@ func RegisterRoutes(f *flamego.Flame) {
f.Group("/{test_id}", func() {
f.Get("", routes.TestGetView)
f.Get("/edit", routes.TestGetEdit)
f.Post("/edit", binding.Form(forms.TestForm{}), routes.TestPostEdit)
})
})
@@ -81,10 +69,6 @@ func RegisterRoutes(f *flamego.Flame) {
f.Get("/details", routes.ApiV1BenchmarkDetails)
})
f.Group("/benchmark_profile", func() {
f.Get("/details", routes.ApiV1BenchmarkProfileDetails)
})
f.Group("/result", func() {
f.Post("/add", binding.Form(forms.ResultForm{}), routes.ApiV1ResultAdd)
f.Get("/list", routes.ApiV1ResultList)


@@ -18,23 +18,11 @@ func ApiV1BenchmarkDetails(c flamego.Context, r flamego.Render) {
// find benchmark from DB
var benchmark models.Benchmark
models.DB.First(&benchmark, benchmarkID)
// return JSON response
r.JSON(200, benchmark)
}
func ApiV1BenchmarkProfileDetails(c flamego.Context, r flamego.Render) {
// find benchmark ID from request
benchmarkProfileID := c.Query("benchmark_profile_id")
// find benchmark from DB
var benchmarkProfile models.BenchmarkProfile
models.DB.Preload("BenchmarkProfiles").First(&benchmarkProfile, benchmarkProfileID)
// return JSON response
r.JSON(200, benchmarkProfile)
}
func ApiV1ResultAdd(c flamego.Context, form forms.ResultForm, errs binding.Errors, r flamego.Render) {
if len(errs) > 0 {
var err error
@@ -48,8 +36,8 @@ func ApiV1ResultAdd(c flamego.Context, form forms.ResultForm, errs binding.Error
}
result := models.Result{
TestID: form.Test,
BenchmarkProfileID: form.BenchmarkProfile,
TestID: form.Test,
BenchmarkID: form.Benchmark,
AverageScore: form.AverageScore,
MinimumScore: form.MinimumScore,
MaximumScore: form.MaximumScore,
@@ -62,12 +50,12 @@ func ApiV1ResultAdd(c flamego.Context, form forms.ResultForm, errs binding.Error
func ApiV1ResultList(c flamego.Context, r flamego.Render) {
// find benchmark and test IDs from request
benchmarkProfileID := c.Query("benchmark_id")
benchmarkID := c.Query("benchmark_id")
testID := c.Query("test_id")
// find results from the DB that match the benchmark and test
var results []models.Result
models.DB.Where("test_id = ? AND benchmark_profile_id = ?", testID, benchmarkProfileID).Find(&results)
models.DB.Where("test_id = ? AND benchmark_id = ?", testID, benchmarkID).Find(&results)
// return JSON response
r.JSON(200, results)
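
With the profile layer gone, `ApiV1ResultAdd` binds `result_benchmark` straight to a benchmark ID and `ApiV1ResultList` filters on `benchmark_id`. A rough client sketch against these endpoints, assuming the result routes are mounted under an `/api/v1` prefix (the prefix is not visible in this diff) and the app listens on port 2830 as in the README:

```go
package main

import (
	"fmt"
	"io"
	"net/http"
	"net/url"
)

func main() {
	base := "http://localhost:2830/api/v1" // assumed prefix; adjust to the real mount point

	// Submit a result: the form field names mirror forms.ResultForm above.
	form := url.Values{
		"result_test":      {"1"},
		"result_benchmark": {"2"}, // now a benchmark ID, not a profile ID
		"result_avg":       {"143.2"},
		"result_min":       {"97.4"},
		"result_max":       {"188.0"},
	}
	resp, err := http.PostForm(base+"/result/add", form)
	if err != nil {
		panic(err)
	}
	resp.Body.Close()

	// List results for the same test/benchmark pair.
	list, err := http.Get(base + "/result/list?test_id=1&benchmark_id=2")
	if err != nil {
		panic(err)
	}
	defer list.Body.Close()
	body, _ := io.ReadAll(list.Body)
	fmt.Println(string(body))
}
```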


@@ -30,7 +30,7 @@ func BenchmarkGetView(c flamego.Context, t template.Template, data template.Data
// find benchmark from DB
var benchmark models.Benchmark
models.DB.Preload("BenchmarkProfiles").First(&benchmark, benchmarkID)
models.DB.First(&benchmark, benchmarkID)
data["benchmark"] = benchmark
data["title"] = benchmark.Name
@@ -64,77 +64,3 @@ func BenchmarkPostCreate(c flamego.Context, form forms.BenchmarkForm, errs bindi
c.Redirect(fmt.Sprintf("/benchmark/%d", benchmark.ID))
}
func BenchmarkGetEdit(c flamego.Context, t template.Template, data template.Data) {
// find benchmark ID from request
benchmarkID := c.Param("benchmark_id")
// find benchmark from DB
var benchmark models.Benchmark
models.DB.First(&benchmark, benchmarkID)
data["benchmark"] = benchmark
data["title"] = "Editing Benchmark"
t.HTML(http.StatusOK, "benchmark/edit")
}
func BenchmarkPostEdit(c flamego.Context, form forms.BenchmarkForm, errs binding.Errors) {
if len(errs) > 0 {
var err error
switch errs[0].Category {
case binding.ErrorCategoryValidation:
err = errs[0].Err.(validator.ValidationErrors)[0]
default:
err = errs[0].Err
}
log.Fatal(err)
}
// find benchmark ID from request
benchmarkID := c.Param("benchmark_id")
// find benchmark from DB
var benchmark models.Benchmark
models.DB.First(&benchmark, benchmarkID)
benchmark.Name = form.Name
benchmark.ScoringType = form.ScoringType
benchmark.Description = form.Description
models.DB.Save(&benchmark)
c.Redirect(fmt.Sprintf("/benchmark/%d", benchmark.ID))
}
func BenchmarkGetProfileAdd(c flamego.Context, t template.Template, data template.Data) {
// find benchmark ID from GET parameters
benchmarkID := c.Query("benchmark")
// check if the query parameter was used
if benchmarkID != "" {
// find benchmark from DB
var benchmark models.Benchmark
models.DB.First(&benchmark, benchmarkID)
data["benchmark"] = benchmark
} else {
// find all benchmarks from DB
var benchmarks []models.Benchmark
models.DB.Find(&benchmarks)
data["benchmarks"] = benchmarks
}
data["title"] = "Add new benchmark settings profile"
t.HTML(http.StatusOK, "benchmark/profile/add")
}
func BenchmarkPostProfileAdd(c flamego.Context, form forms.BenchmarkProfileForm, errs binding.Errors) {
benchmarkProfile := models.BenchmarkProfile{
BenchmarkID: form.BenchmarkID,
Label: form.Label,
Settings: form.Settings,
}
_ = models.DB.Create(&benchmarkProfile)
c.Redirect(fmt.Sprintf("/benchmark/%d", form.BenchmarkID))
}


@@ -63,43 +63,3 @@ func HardwarePostCreate(c flamego.Context, form forms.HardwareForm, errs binding
c.Redirect(fmt.Sprintf("/hardware/%d", hardware.ID))
}
func HardwareGetEdit(c flamego.Context, t template.Template, data template.Data) {
// find hardware ID from request
hardwareID := c.Param("hardware_id")
// find hardware from DB
var hardware models.Hardware
models.DB.Preload("Tests.Benchmarks").First(&hardware, hardwareID)
data["hardware"] = hardware
data["title"] = "Edit Hardware"
t.HTML(http.StatusOK, "hardware/edit")
}
func HardwarePostEdit(c flamego.Context, form forms.HardwareForm, errs binding.Errors) {
if len(errs) > 0 {
var err error
switch errs[0].Category {
case binding.ErrorCategoryValidation:
err = errs[0].Err.(validator.ValidationErrors)[0]
default:
err = errs[0].Err
}
log.Fatal(err)
}
// find hardware ID from request
hardwareID := c.Param("hardware_id")
// find hardware from DB
var hardware models.Hardware
models.DB.Preload("Tests.Benchmarks").First(&hardware, hardwareID)
hardware.Name = form.Name
hardware.Type = form.Type
models.DB.Save(&hardware)
c.Redirect(fmt.Sprintf("/hardware/%d", hardware.ID))
}


@@ -26,7 +26,7 @@ func ResultPostCreate(c flamego.Context, form forms.ResultForm, errs binding.Err
result := models.Result{
TestID: form.Test,
BenchmarkProfileID: form.BenchmarkProfile,
BenchmarkID: form.Benchmark,
AverageScore: form.AverageScore,
MinimumScore: form.MinimumScore,
MaximumScore: form.MaximumScore,


@@ -73,10 +73,10 @@ func TestPostCreate(c flamego.Context, form forms.TestForm, errs binding.Errors)
_ = models.DB.Create(&test)
// bind benchmarks to test
for _, v := range form.BenchmarkProfiles {
for _, v := range form.Benchmarks {
var benchmark models.Benchmark
models.DB.First(&benchmark, v) // find benchmark
models.DB.Model(&test).Association("BenchmarkProfiles").Append(&benchmark)
models.DB.Model(&test).Association("Benchmarks").Append(&benchmark)
}
c.Redirect(fmt.Sprintf("/test/%d", test.ID))
@@ -86,7 +86,7 @@ func TestGetEdit(c flamego.Context, t template.Template, data template.Data) {
// find test in DB
testID := c.Param("test_id")
var test models.Test
models.DB.Preload("Hardware").Preload("BenchmarkProfiles").First(&test, testID)
models.DB.Preload("Hardware").Preload("Benchmarks").First(&test, testID)
data["test"] = test
// add hardware components to template
@@ -99,56 +99,6 @@ func TestGetEdit(c flamego.Context, t template.Template, data template.Data) {
models.DB.Find(&benchmarks)
data["benchmarks"] = benchmarks
// determine which benchmarks are selected in a test
selectedBenchmarks := test.SelectedBenchmarks()
data["selectedBenchmarks"] = selectedBenchmarks
data["title"] = fmt.Sprintf("Editing Test: %s", test.Name)
t.HTML(http.StatusOK, "test/edit")
}
func TestPostEdit(c flamego.Context, form forms.TestForm, errs binding.Errors) {
if len(errs) > 0 {
var err error
switch errs[0].Category {
case binding.ErrorCategoryValidation:
err = errs[0].Err.(validator.ValidationErrors)[0]
default:
err = errs[0].Err
}
log.Fatal(err)
}
// find test ID from request
testID := c.Param("test_id")
// find hardware from DB
var test models.Test
models.DB.Preload("Hardware").Preload("BenchmarkProfiles").First(&test, testID)
test.Name = form.Name
test.Description = form.Description
test.HardwareID = form.Hardware
// bind benchmarks to test that aren't already associated
for _, b := range form.BenchmarkProfiles {
if ! test.IsBenchmarkSelected(b) {
var benchmark models.BenchmarkProfile
models.DB.First(&benchmark, b) // find benchmark
models.DB.Model(&test).Association("BenchmarkProfiles").Append(&benchmark)
}
}
// remove associated benchmarks that weren't in the form
for _, b := range test.BenchmarkProfiles {
if ! form.IsBenchmarkSelected(b.ID) {
var benchmark models.BenchmarkProfile
models.DB.First(&benchmark, b) // find benchmark
models.DB.Model(&test).Association("BenchmarkProfiles").Delete(&benchmark)
}
}
models.DB.Save(&test)
c.Redirect(fmt.Sprintf("/test/%d", test.ID))
}
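
The deleted `TestPostEdit` kept the benchmark selection in sync by appending missing associations and deleting unselected ones one at a time. If an edit flow is reintroduced, GORM's association `Replace` can do the same sync in a single call; a sketch under the same simplified models as the earlier example, with the selected IDs assumed to arrive as the `[]string` bound from `test_benchmarks`:

```go
package main

import (
	"gorm.io/driver/sqlite"
	"gorm.io/gorm"
)

// Simplified models, as in the earlier sketch.
type Benchmark struct {
	gorm.Model
	Name string
}

type Test struct {
	gorm.Model
	Name       string
	Benchmarks []Benchmark `gorm:"many2many:tests_benchmarks;"`
}

// syncBenchmarks replaces a test's benchmark set in one call instead of
// appending missing entries and deleting unselected ones individually,
// as the removed TestPostEdit did.
func syncBenchmarks(db *gorm.DB, test *Test, selectedIDs []string) error {
	var selected []Benchmark
	if err := db.Find(&selected, selectedIDs).Error; err != nil {
		return err
	}
	// Replace drops associations not in `selected` and appends the rest.
	return db.Model(test).Association("Benchmarks").Replace(selected)
}

func main() {
	db, _ := gorm.Open(sqlite.Open("file::memory:"), &gorm.Config{})
	_ = db.AutoMigrate(&Benchmark{}, &Test{})
	db.Create(&Benchmark{Name: "Heaven"})
	db.Create(&Benchmark{Name: "Superposition"})
	test := Test{Name: "example"}
	db.Create(&test)
	_ = syncBenchmarks(db, &test, []string{"2"})
}
```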