Compare commits
1 Commits
62d74beba4
...
agent/rex/
| Author | SHA1 | Date | |
|---|---|---|---|
| e2be3bffa7 |
@@ -1,25 +1,37 @@
|
||||
# Extrudex API container image.
# NOTE(review): the scraped diff interleaved the old Go Dockerfile with the new
# .NET one (two unrelated build stages, COPY --from=builder with no matching
# stage, CMD and ENTRYPOINT both present). This is the coherent .NET version.

# ── Stage 1: Build ──────────────────────────────────────────
FROM mcr.microsoft.com/dotnet/sdk:9.0 AS build
WORKDIR /src

# Copy csproj first for layer caching — restore before copying source
COPY Extrudex.csproj .
RUN dotnet restore

# Copy the rest of the source and publish
COPY . .
RUN dotnet publish Extrudex.csproj \
    -c Release \
    -o /app/publish \
    --no-restore

# ── Stage 2: Runtime ────────────────────────────────────────
FROM mcr.microsoft.com/dotnet/aspnet:9.0 AS runtime
WORKDIR /app

# Install curl for health check (not included in aspnet base image)
RUN apt-get update && apt-get install -y --no-install-recommends curl && rm -rf /var/lib/apt/lists/*

# Non-root user for security
RUN adduser --disabled-password --gecos "" appuser

# Copy published output from build stage
COPY --from=build /app/publish .

USER appuser

# ASP.NET Core listens on 8080 by default in .NET 8+
EXPOSE 8080

# Health check against /health endpoint
HEALTHCHECK --interval=30s --timeout=5s --start-period=10s --retries=3 \
    CMD curl --fail http://localhost:8080/health || exit 1

ENTRYPOINT ["dotnet", "Extrudex.dll"]
|
||||
@@ -1,80 +0,0 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"context"
|
||||
"log/slog"
|
||||
"net/http"
|
||||
"os"
|
||||
"os/signal"
|
||||
"syscall"
|
||||
"time"
|
||||
|
||||
"github.com/CubeCraft-Creations/Extrudex/backend/internal/config"
|
||||
"github.com/CubeCraft-Creations/Extrudex/backend/internal/db"
|
||||
"github.com/CubeCraft-Creations/Extrudex/backend/internal/router"
|
||||
)
|
||||
|
||||
func main() {
|
||||
// Setup structured logging
|
||||
slog.SetDefault(slog.New(slog.NewTextHandler(os.Stdout, &slog.HandlerOptions{
|
||||
Level: slog.LevelInfo,
|
||||
})))
|
||||
|
||||
// Load configuration
|
||||
cfg, err := config.Load()
|
||||
if err != nil {
|
||||
slog.Error("failed to load config", "error", err)
|
||||
os.Exit(1)
|
||||
}
|
||||
|
||||
slog.Info("config loaded", "port", cfg.Port, "cors_origin", cfg.CorsOrigin)
|
||||
|
||||
// Connect to database
|
||||
dbPool, err := db.NewPool(cfg.DatabaseURL)
|
||||
if err != nil {
|
||||
slog.Error("failed to connect to database", "error", err)
|
||||
os.Exit(1)
|
||||
}
|
||||
defer db.ClosePool(dbPool)
|
||||
|
||||
slog.Info("database connected")
|
||||
|
||||
// Create router
|
||||
r := router.New(cfg, dbPool)
|
||||
|
||||
// Create HTTP server
|
||||
server := &http.Server{
|
||||
Addr: ":" + cfg.Port,
|
||||
Handler: r,
|
||||
ReadTimeout: 15 * time.Second,
|
||||
WriteTimeout: 15 * time.Second,
|
||||
IdleTimeout: 60 * time.Second,
|
||||
}
|
||||
|
||||
// Start server in goroutine
|
||||
go func() {
|
||||
slog.Info("server starting", "addr", server.Addr)
|
||||
if err := server.ListenAndServe(); err != nil && err != http.ErrServerClosed {
|
||||
slog.Error("server error", "error", err)
|
||||
os.Exit(1)
|
||||
}
|
||||
}()
|
||||
|
||||
// Wait for shutdown signal
|
||||
quit := make(chan os.Signal, 1)
|
||||
signal.Notify(quit, syscall.SIGINT, syscall.SIGTERM)
|
||||
<-quit
|
||||
|
||||
slog.Info("server shutting down")
|
||||
|
||||
// Graceful shutdown
|
||||
ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second)
|
||||
defer cancel()
|
||||
|
||||
if err := server.Shutdown(ctx); err != nil {
|
||||
slog.Error("server shutdown error", "error", err)
|
||||
}
|
||||
|
||||
db.ClosePool(dbPool)
|
||||
slog.Info("server stopped")
|
||||
}
|
||||
@@ -1,18 +0,0 @@
|
||||
module github.com/CubeCraft-Creations/Extrudex/backend
|
||||
|
||||
go 1.24
|
||||
|
||||
require (
|
||||
github.com/go-chi/chi/v5 v5.2.0
|
||||
github.com/jackc/pgx/v5 v5.7.4
|
||||
github.com/kelseyhightower/envconfig v1.4.0
|
||||
)
|
||||
|
||||
require (
|
||||
github.com/jackc/pgpassfile v1.0.0 // indirect
|
||||
github.com/jackc/pgservicefile v0.0.0-20240606120523-5a60cdf6a761 // indirect
|
||||
github.com/jackc/puddle/v2 v2.2.2 // indirect
|
||||
golang.org/x/crypto v0.31.0 // indirect
|
||||
golang.org/x/sync v0.10.0 // indirect
|
||||
golang.org/x/text v0.21.0 // indirect
|
||||
)
|
||||
@@ -1,32 +0,0 @@
|
||||
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
|
||||
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||
github.com/go-chi/chi/v5 v5.2.0 h1:Aj1EtB0qR2Rdo2dG4O94RIU35w2lvQSj6BRA4+qwFL0=
|
||||
github.com/go-chi/chi/v5 v5.2.0/go.mod h1:DslCQbL2OYiznFReuXYUmQ2hGd1aDpCnlMNITLSKoi8=
|
||||
github.com/jackc/pgpassfile v1.0.0 h1:/6Hmqy13Ss2zCq62VdNG8tM1wchn8zjSGOBJ6icpsIM=
|
||||
github.com/jackc/pgpassfile v1.0.0/go.mod h1:CEx0iS5ambNFdcRtxPj5JhEz+xB6uRky5eyVu/W2HEg=
|
||||
github.com/jackc/pgservicefile v0.0.0-20240606120523-5a60cdf6a761 h1:iCEnooe7UlwOQYpKFhBabPMi4aNAfoODPEFNiAnClxo=
|
||||
github.com/jackc/pgservicefile v0.0.0-20240606120523-5a60cdf6a761/go.mod h1:5TJZWKEWniPve33vlWYSoGYefn3gLQRzjfDlhSJ9ZKM=
|
||||
github.com/jackc/pgx/v5 v5.7.4 h1:9wKznZrhWa2QiHL+NjTSPP6yjl3451BX3imWDnokYlg=
|
||||
github.com/jackc/pgx/v5 v5.7.4/go.mod h1:ncY89UGWxg82EykZUwSpUKEfccBGGYq1xjrOpsbsfGQ=
|
||||
github.com/jackc/puddle/v2 v2.2.2 h1:PR8nw+E/1w0GLuRFSmiioY6UooMp6KJv0/61nB7icHo=
|
||||
github.com/jackc/puddle/v2 v2.2.2/go.mod h1:vriiEXHvEE654aYKXXjOvZM39qJ0q+azkZFrfEOc3H4=
|
||||
github.com/kelseyhightower/envconfig v1.4.0 h1:Im6hONhd3pLkfDFsbRgu68RDNkGF1r3dvMUtDTo2cv8=
|
||||
github.com/kelseyhightower/envconfig v1.4.0/go.mod h1:cccZRl6mQpaq41TPp5QxidR+Sa3axMbJDNb//FQX6Gg=
|
||||
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
|
||||
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
|
||||
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
|
||||
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
|
||||
github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
|
||||
github.com/stretchr/testify v1.8.1 h1:w7B6lhMri9wdJUVmEZPGGhZzrYTPvgJArz7wNPgYKsk=
|
||||
github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4=
|
||||
golang.org/x/crypto v0.31.0 h1:ihbySMvVjLAeSH1IbfcRTkD/iNscyz8rGzjF/E5hV6U=
|
||||
golang.org/x/crypto v0.31.0/go.mod h1:kDsLvtWBEx7MV9tJOj9bnXsPbxwJQ6csT/x4KIN4Ssk=
|
||||
golang.org/x/sync v0.10.0 h1:3NQrjDixjgGwUOCaF8w2+VYHv0Ve/vGYSbdkTa98gmQ=
|
||||
golang.org/x/sync v0.10.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
|
||||
golang.org/x/text v0.21.0 h1:zyQAAkrwaneQ066sspRyJaG9VNi/YJ1NfzcGB3hZ/qo=
|
||||
golang.org/x/text v0.21.0/go.mod h1:4IBbMaMmOPCJ8SecivzSH54+73PCFmPWxNTLm+vZkEQ=
|
||||
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
|
||||
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
|
||||
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
|
||||
@@ -1,24 +0,0 @@
|
||||
package config
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
|
||||
"github.com/kelseyhightower/envconfig"
|
||||
)
|
||||
|
||||
// Config holds all application configuration loaded from environment variables.
|
||||
type Config struct {
|
||||
DatabaseURL string `envconfig:"database_url" required:"true"`
|
||||
Port string `envconfig:"port" default:"8080"`
|
||||
CorsOrigin string `envconfig:"cors_origin" default:"*"`
|
||||
LogLevel string `envconfig:"log_level" default:"info"`
|
||||
}
|
||||
|
||||
// Load reads configuration from environment variables and returns a populated Config.
|
||||
func Load() (*Config, error) {
|
||||
var cfg Config
|
||||
if err := envconfig.Process("", &cfg); err != nil {
|
||||
return nil, fmt.Errorf("failed to load config: %w", err)
|
||||
}
|
||||
return &cfg, nil
|
||||
}
|
||||
@@ -1,34 +0,0 @@
|
||||
package db
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"time"
|
||||
|
||||
"github.com/jackc/pgx/v5/pgxpool"
|
||||
)
|
||||
|
||||
// NewPool creates a new pgx connection pool and verifies connectivity with a ping.
|
||||
func NewPool(databaseURL string) (*pgxpool.Pool, error) {
|
||||
ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
|
||||
defer cancel()
|
||||
|
||||
pool, err := pgxpool.New(ctx, databaseURL)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to create db pool: %w", err)
|
||||
}
|
||||
|
||||
if err := pool.Ping(ctx); err != nil {
|
||||
pool.Close()
|
||||
return nil, fmt.Errorf("failed to ping db: %w", err)
|
||||
}
|
||||
|
||||
return pool, nil
|
||||
}
|
||||
|
||||
// ClosePool gracefully closes the connection pool.
|
||||
func ClosePool(pool *pgxpool.Pool) {
|
||||
if pool != nil {
|
||||
pool.Close()
|
||||
}
|
||||
}
|
||||
@@ -1,67 +0,0 @@
|
||||
// Package dtos defines request/response data transfer objects for the Extrudex API.
|
||||
// DTOs keep HTTP serialization concerns separate from domain models.
|
||||
package dtos
|
||||
|
||||
// ============================================================================
|
||||
// Common Response Wrappers
|
||||
// ============================================================================
|
||||
|
||||
// ListResponse wraps a paginated collection response.
|
||||
type ListResponse struct {
|
||||
Data any `json:"data"`
|
||||
Total int `json:"total"`
|
||||
Limit int `json:"limit"`
|
||||
Offset int `json:"offset"`
|
||||
}
|
||||
|
||||
// SingleResponse wraps a single-item response.
|
||||
type SingleResponse struct {
|
||||
Data any `json:"data"`
|
||||
}
|
||||
|
||||
// ErrorResponse is the standard error payload for all API errors.
|
||||
type ErrorResponse struct {
|
||||
Error string `json:"error"`
|
||||
Code int `json:"code"`
|
||||
}
|
||||
|
||||
// ============================================================================
|
||||
// Filament DTOs
|
||||
// ============================================================================
|
||||
|
||||
// CreateFilamentRequest is the POST body for creating a new filament spool.
|
||||
type CreateFilamentRequest struct {
|
||||
Name string `json:"name"`
|
||||
MaterialBaseID int `json:"material_base_id"`
|
||||
MaterialFinishID int `json:"material_finish_id"`
|
||||
MaterialModifierID *int `json:"material_modifier_id,omitempty"`
|
||||
ColorHex string `json:"color_hex"`
|
||||
Brand *string `json:"brand,omitempty"`
|
||||
DiameterMM *float64 `json:"diameter_mm,omitempty"` // defaults to 1.75
|
||||
InitialGrams int `json:"initial_grams"`
|
||||
RemainingGrams int `json:"remaining_grams"`
|
||||
SpoolWeightGrams *int `json:"spool_weight_grams,omitempty"`
|
||||
CostUSD *float64 `json:"cost_usd,omitempty"`
|
||||
LowStockThresholdGrams *int `json:"low_stock_threshold_grams,omitempty"` // defaults to 50
|
||||
Notes *string `json:"notes,omitempty"`
|
||||
Barcode *string `json:"barcode,omitempty"`
|
||||
}
|
||||
|
||||
// UpdateFilamentRequest is the PUT body for partially updating a filament spool.
|
||||
// All fields are optional — only non-nil fields are applied.
|
||||
type UpdateFilamentRequest struct {
|
||||
Name *string `json:"name,omitempty"`
|
||||
MaterialBaseID *int `json:"material_base_id,omitempty"`
|
||||
MaterialFinishID *int `json:"material_finish_id,omitempty"`
|
||||
MaterialModifierID *int `json:"material_modifier_id,omitempty"`
|
||||
ColorHex *string `json:"color_hex,omitempty"`
|
||||
Brand *string `json:"brand,omitempty"`
|
||||
DiameterMM *float64 `json:"diameter_mm,omitempty"`
|
||||
InitialGrams *int `json:"initial_grams,omitempty"`
|
||||
RemainingGrams *int `json:"remaining_grams,omitempty"`
|
||||
SpoolWeightGrams *int `json:"spool_weight_grams,omitempty"`
|
||||
CostUSD *float64 `json:"cost_usd,omitempty"`
|
||||
LowStockThresholdGrams *int `json:"low_stock_threshold_grams,omitempty"`
|
||||
Notes *string `json:"notes,omitempty"`
|
||||
Barcode *string `json:"barcode,omitempty"`
|
||||
}
|
||||
@@ -1,273 +0,0 @@
|
||||
package handlers
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"log/slog"
|
||||
"net/http"
|
||||
"strconv"
|
||||
|
||||
"github.com/CubeCraft-Creations/Extrudex/backend/internal/dtos"
|
||||
"github.com/CubeCraft-Creations/Extrudex/backend/internal/models"
|
||||
"github.com/CubeCraft-Creations/Extrudex/backend/internal/repositories"
|
||||
"github.com/CubeCraft-Creations/Extrudex/backend/internal/services"
|
||||
"github.com/go-chi/chi/v5"
|
||||
)
|
||||
|
||||
// FilamentHandler handles HTTP requests for filament spool CRUD operations.
|
||||
type FilamentHandler struct {
|
||||
service *services.FilamentService
|
||||
}
|
||||
|
||||
// NewFilamentHandler creates a FilamentHandler with the given service.
|
||||
func NewFilamentHandler(service *services.FilamentService) *FilamentHandler {
|
||||
return &FilamentHandler{service: service}
|
||||
}
|
||||
|
||||
// List handles GET /api/filaments — returns paginated, filtered spools.
|
||||
func (h *FilamentHandler) List(w http.ResponseWriter, r *http.Request) {
|
||||
limit, offset := parsePagination(r)
|
||||
filter := repositories.FilamentFilter{
|
||||
Material: r.URL.Query().Get("material"),
|
||||
Finish: r.URL.Query().Get("finish"),
|
||||
Color: r.URL.Query().Get("color"),
|
||||
LowStock: r.URL.Query().Get("low_stock") == "true",
|
||||
Limit: limit,
|
||||
Offset: offset,
|
||||
}
|
||||
|
||||
spools, total, err := h.service.List(r.Context(), filter)
|
||||
if err != nil {
|
||||
slog.Error("failed to list filaments", "error", err)
|
||||
writeJSON(w, http.StatusInternalServerError, dtos.ErrorResponse{
|
||||
Error: "internal server error",
|
||||
Code: http.StatusInternalServerError,
|
||||
})
|
||||
return
|
||||
}
|
||||
|
||||
writeJSON(w, http.StatusOK, dtos.ListResponse{
|
||||
Data: spools,
|
||||
Total: total,
|
||||
Limit: limit,
|
||||
Offset: offset,
|
||||
})
|
||||
}
|
||||
|
||||
// Get handles GET /api/filaments/{id} — returns a single spool.
|
||||
func (h *FilamentHandler) Get(w http.ResponseWriter, r *http.Request) {
|
||||
id, err := strconv.Atoi(chi.URLParam(r, "id"))
|
||||
if err != nil {
|
||||
writeJSON(w, http.StatusBadRequest, dtos.ErrorResponse{
|
||||
Error: "invalid filament ID",
|
||||
Code: http.StatusBadRequest,
|
||||
})
|
||||
return
|
||||
}
|
||||
|
||||
spool, err := h.service.GetByID(r.Context(), id)
|
||||
if err != nil {
|
||||
slog.Error("failed to get filament", "id", id, "error", err)
|
||||
writeJSON(w, http.StatusInternalServerError, dtos.ErrorResponse{
|
||||
Error: "internal server error",
|
||||
Code: http.StatusInternalServerError,
|
||||
})
|
||||
return
|
||||
}
|
||||
if spool == nil {
|
||||
writeJSON(w, http.StatusNotFound, dtos.ErrorResponse{
|
||||
Error: "filament not found",
|
||||
Code: http.StatusNotFound,
|
||||
})
|
||||
return
|
||||
}
|
||||
|
||||
writeJSON(w, http.StatusOK, dtos.SingleResponse{Data: spool})
|
||||
}
|
||||
|
||||
// Create handles POST /api/filaments — creates a new filament spool.
|
||||
func (h *FilamentHandler) Create(w http.ResponseWriter, r *http.Request) {
|
||||
var req dtos.CreateFilamentRequest
|
||||
if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
|
||||
writeJSON(w, http.StatusBadRequest, dtos.ErrorResponse{
|
||||
Error: "invalid request body",
|
||||
Code: http.StatusBadRequest,
|
||||
})
|
||||
return
|
||||
}
|
||||
|
||||
// Validate required fields.
|
||||
if err := services.ValidateCreateFilamentRequest(req); err != nil {
|
||||
writeJSON(w, http.StatusBadRequest, dtos.ErrorResponse{
|
||||
Error: "validation failed: " + err.Error(),
|
||||
Code: http.StatusBadRequest,
|
||||
})
|
||||
return
|
||||
}
|
||||
|
||||
// Build domain model.
|
||||
spool := models.FilamentSpool{
|
||||
Name: req.Name,
|
||||
MaterialBaseID: req.MaterialBaseID,
|
||||
MaterialFinishID: req.MaterialFinishID,
|
||||
MaterialModifierID: req.MaterialModifierID,
|
||||
ColorHex: req.ColorHex,
|
||||
Brand: req.Brand,
|
||||
DiameterMM: 1.75, // default
|
||||
InitialGrams: req.InitialGrams,
|
||||
RemainingGrams: req.RemainingGrams,
|
||||
SpoolWeightGrams: req.SpoolWeightGrams,
|
||||
CostUSD: req.CostUSD,
|
||||
LowStockThresholdGrams: 50, // default
|
||||
Notes: req.Notes,
|
||||
Barcode: req.Barcode,
|
||||
}
|
||||
if req.DiameterMM != nil {
|
||||
spool.DiameterMM = *req.DiameterMM
|
||||
}
|
||||
if req.LowStockThresholdGrams != nil {
|
||||
spool.LowStockThresholdGrams = *req.LowStockThresholdGrams
|
||||
}
|
||||
|
||||
created, err := h.service.Create(r.Context(), &spool)
|
||||
if err != nil {
|
||||
slog.Error("failed to create filament", "error", err)
|
||||
writeJSON(w, http.StatusInternalServerError, dtos.ErrorResponse{
|
||||
Error: "internal server error",
|
||||
Code: http.StatusInternalServerError,
|
||||
})
|
||||
return
|
||||
}
|
||||
|
||||
writeJSON(w, http.StatusCreated, dtos.SingleResponse{Data: created})
|
||||
}
|
||||
|
||||
// Update handles PUT /api/filaments/{id} — partially updates a spool.
|
||||
func (h *FilamentHandler) Update(w http.ResponseWriter, r *http.Request) {
|
||||
id, err := strconv.Atoi(chi.URLParam(r, "id"))
|
||||
if err != nil {
|
||||
writeJSON(w, http.StatusBadRequest, dtos.ErrorResponse{
|
||||
Error: "invalid filament ID",
|
||||
Code: http.StatusBadRequest,
|
||||
})
|
||||
return
|
||||
}
|
||||
|
||||
var req dtos.UpdateFilamentRequest
|
||||
if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
|
||||
writeJSON(w, http.StatusBadRequest, dtos.ErrorResponse{
|
||||
Error: "invalid request body",
|
||||
Code: http.StatusBadRequest,
|
||||
})
|
||||
return
|
||||
}
|
||||
|
||||
// Validate update fields.
|
||||
if err := services.ValidateUpdateFilamentRequest(req); err != nil {
|
||||
writeJSON(w, http.StatusBadRequest, dtos.ErrorResponse{
|
||||
Error: "validation failed: " + err.Error(),
|
||||
Code: http.StatusBadRequest,
|
||||
})
|
||||
return
|
||||
}
|
||||
|
||||
// Build updates map (only non-nil fields).
|
||||
updates := buildFilamentUpdates(req)
|
||||
|
||||
updated, err := h.service.Update(r.Context(), id, updates)
|
||||
if err != nil {
|
||||
slog.Error("failed to update filament", "id", id, "error", err)
|
||||
writeJSON(w, http.StatusInternalServerError, dtos.ErrorResponse{
|
||||
Error: "internal server error",
|
||||
Code: http.StatusInternalServerError,
|
||||
})
|
||||
return
|
||||
}
|
||||
if updated == nil {
|
||||
writeJSON(w, http.StatusNotFound, dtos.ErrorResponse{
|
||||
Error: "filament not found",
|
||||
Code: http.StatusNotFound,
|
||||
})
|
||||
return
|
||||
}
|
||||
|
||||
writeJSON(w, http.StatusOK, dtos.SingleResponse{Data: updated})
|
||||
}
|
||||
|
||||
// Delete handles DELETE /api/filaments/{id} — soft-deletes a spool.
|
||||
func (h *FilamentHandler) Delete(w http.ResponseWriter, r *http.Request) {
|
||||
id, err := strconv.Atoi(chi.URLParam(r, "id"))
|
||||
if err != nil {
|
||||
writeJSON(w, http.StatusBadRequest, dtos.ErrorResponse{
|
||||
Error: "invalid filament ID",
|
||||
Code: http.StatusBadRequest,
|
||||
})
|
||||
return
|
||||
}
|
||||
|
||||
deleted, err := h.service.SoftDelete(r.Context(), id)
|
||||
if err != nil {
|
||||
slog.Error("failed to delete filament", "id", id, "error", err)
|
||||
writeJSON(w, http.StatusInternalServerError, dtos.ErrorResponse{
|
||||
Error: "internal server error",
|
||||
Code: http.StatusInternalServerError,
|
||||
})
|
||||
return
|
||||
}
|
||||
if !deleted {
|
||||
writeJSON(w, http.StatusNotFound, dtos.ErrorResponse{
|
||||
Error: "filament not found",
|
||||
Code: http.StatusNotFound,
|
||||
})
|
||||
return
|
||||
}
|
||||
|
||||
w.WriteHeader(http.StatusNoContent)
|
||||
}
|
||||
|
||||
// buildFilamentUpdates converts an UpdateFilamentRequest to a map of column→value.
|
||||
func buildFilamentUpdates(req dtos.UpdateFilamentRequest) map[string]interface{} {
|
||||
updates := make(map[string]interface{})
|
||||
if req.Name != nil {
|
||||
updates["name"] = *req.Name
|
||||
}
|
||||
if req.MaterialBaseID != nil {
|
||||
updates["material_base_id"] = *req.MaterialBaseID
|
||||
}
|
||||
if req.MaterialFinishID != nil {
|
||||
updates["material_finish_id"] = *req.MaterialFinishID
|
||||
}
|
||||
if req.MaterialModifierID != nil {
|
||||
updates["material_modifier_id"] = *req.MaterialModifierID
|
||||
}
|
||||
if req.ColorHex != nil {
|
||||
updates["color_hex"] = *req.ColorHex
|
||||
}
|
||||
if req.Brand != nil {
|
||||
updates["brand"] = *req.Brand
|
||||
}
|
||||
if req.DiameterMM != nil {
|
||||
updates["diameter_mm"] = *req.DiameterMM
|
||||
}
|
||||
if req.InitialGrams != nil {
|
||||
updates["initial_grams"] = *req.InitialGrams
|
||||
}
|
||||
if req.RemainingGrams != nil {
|
||||
updates["remaining_grams"] = *req.RemainingGrams
|
||||
}
|
||||
if req.SpoolWeightGrams != nil {
|
||||
updates["spool_weight_grams"] = *req.SpoolWeightGrams
|
||||
}
|
||||
if req.CostUSD != nil {
|
||||
updates["cost_usd"] = *req.CostUSD
|
||||
}
|
||||
if req.LowStockThresholdGrams != nil {
|
||||
updates["low_stock_threshold_grams"] = *req.LowStockThresholdGrams
|
||||
}
|
||||
if req.Notes != nil {
|
||||
updates["notes"] = *req.Notes
|
||||
}
|
||||
if req.Barcode != nil {
|
||||
updates["barcode"] = *req.Barcode
|
||||
}
|
||||
return updates
|
||||
}
|
||||
@@ -1,50 +0,0 @@
|
||||
package handlers
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
"log/slog"
|
||||
"net/http"
|
||||
"time"
|
||||
|
||||
"github.com/jackc/pgx/v5/pgxpool"
|
||||
)
|
||||
|
||||
// HealthHandler provides a health check endpoint that verifies database connectivity.
|
||||
type HealthHandler struct {
|
||||
dbPool *pgxpool.Pool
|
||||
}
|
||||
|
||||
// NewHealthHandler creates a new HealthHandler with the given database pool.
|
||||
func NewHealthHandler(dbPool *pgxpool.Pool) *HealthHandler {
|
||||
return &HealthHandler{dbPool: dbPool}
|
||||
}
|
||||
|
||||
// ServeHTTP handles GET /health requests.
|
||||
func (h *HealthHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) {
|
||||
ctx, cancel := context.WithTimeout(r.Context(), 5*time.Second)
|
||||
defer cancel()
|
||||
|
||||
dbConnected := false
|
||||
if h.dbPool != nil {
|
||||
if err := h.dbPool.Ping(ctx); err == nil {
|
||||
dbConnected = true
|
||||
} else {
|
||||
slog.Warn("health check db ping failed", "error", err)
|
||||
}
|
||||
}
|
||||
|
||||
resp := map[string]any{
|
||||
"status": "ok",
|
||||
"timestamp": time.Now().UTC().Format(time.RFC3339),
|
||||
"db_connected": dbConnected,
|
||||
}
|
||||
|
||||
w.Header().Set("Content-Type", "application/json")
|
||||
if !dbConnected {
|
||||
w.WriteHeader(http.StatusServiceUnavailable)
|
||||
}
|
||||
if err := json.NewEncoder(w).Encode(resp); err != nil {
|
||||
slog.Error("failed to encode health response", "error", err)
|
||||
}
|
||||
}
|
||||
@@ -1,51 +0,0 @@
|
||||
package handlers
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"log/slog"
|
||||
"net/http"
|
||||
"strconv"
|
||||
|
||||
"github.com/CubeCraft-Creations/Extrudex/backend/internal/dtos"
|
||||
"github.com/CubeCraft-Creations/Extrudex/backend/internal/services"
|
||||
)
|
||||
|
||||
// writeJSON serializes v as JSON to the response writer with the given status code.
|
||||
// Logs an error if encoding fails.
|
||||
func writeJSON(w http.ResponseWriter, status int, v interface{}) {
|
||||
w.Header().Set("Content-Type", "application/json")
|
||||
w.WriteHeader(status)
|
||||
if err := json.NewEncoder(w).Encode(v); err != nil {
|
||||
slog.Error("failed to encode JSON response", "error", err)
|
||||
}
|
||||
}
|
||||
|
||||
// parsePagination reads limit and offset query parameters with defaults of 20 and 0.
|
||||
func parsePagination(r *http.Request) (limit, offset int) {
|
||||
limit = 20
|
||||
offset = 0
|
||||
|
||||
if l := r.URL.Query().Get("limit"); l != "" {
|
||||
if parsed, err := strconv.Atoi(l); err == nil && parsed > 0 {
|
||||
limit = parsed
|
||||
}
|
||||
}
|
||||
if o := r.URL.Query().Get("offset"); o != "" {
|
||||
if parsed, err := strconv.Atoi(o); err == nil && parsed >= 0 {
|
||||
offset = parsed
|
||||
}
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
// ValidateCreateFilamentRequest validates a CreateFilamentRequest DTO.
|
||||
// Re-exports the service-layer validator for handler use.
|
||||
func ValidateCreateFilamentRequest(req dtos.CreateFilamentRequest) error {
|
||||
return services.ValidateCreateFilamentRequest(req)
|
||||
}
|
||||
|
||||
// ValidateUpdateFilamentRequest validates an UpdateFilamentRequest DTO.
|
||||
// Re-exports the service-layer validator for handler use.
|
||||
func ValidateUpdateFilamentRequest(req dtos.UpdateFilamentRequest) error {
|
||||
return services.ValidateUpdateFilamentRequest(req)
|
||||
}
|
||||
@@ -1,34 +0,0 @@
|
||||
package handlers
|
||||
|
||||
import (
|
||||
"log/slog"
|
||||
"net/http"
|
||||
|
||||
"github.com/CubeCraft-Creations/Extrudex/backend/internal/dtos"
|
||||
"github.com/CubeCraft-Creations/Extrudex/backend/internal/repositories"
|
||||
)
|
||||
|
||||
// MaterialHandler handles requests for material lookup data.
|
||||
type MaterialHandler struct {
|
||||
repo *repositories.MaterialRepository
|
||||
}
|
||||
|
||||
// NewMaterialHandler creates a MaterialHandler with the given repository.
|
||||
func NewMaterialHandler(repo *repositories.MaterialRepository) *MaterialHandler {
|
||||
return &MaterialHandler{repo: repo}
|
||||
}
|
||||
|
||||
// List handles GET /api/materials — returns all material bases.
|
||||
func (h *MaterialHandler) List(w http.ResponseWriter, r *http.Request) {
|
||||
materials, err := h.repo.GetAll(r.Context())
|
||||
if err != nil {
|
||||
slog.Error("failed to list materials", "error", err)
|
||||
writeJSON(w, http.StatusInternalServerError, dtos.ErrorResponse{
|
||||
Error: "internal server error",
|
||||
Code: http.StatusInternalServerError,
|
||||
})
|
||||
return
|
||||
}
|
||||
|
||||
writeJSON(w, http.StatusOK, dtos.SingleResponse{Data: materials})
|
||||
}
|
||||
@@ -1,60 +0,0 @@
|
||||
package handlers
|
||||
|
||||
import (
|
||||
"log/slog"
|
||||
"net/http"
|
||||
"strconv"
|
||||
|
||||
"github.com/CubeCraft-Creations/Extrudex/backend/internal/dtos"
|
||||
"github.com/CubeCraft-Creations/Extrudex/backend/internal/repositories"
|
||||
"github.com/CubeCraft-Creations/Extrudex/backend/internal/services"
|
||||
)
|
||||
|
||||
// PrintJobHandler handles HTTP requests for print job operations.
|
||||
type PrintJobHandler struct {
|
||||
service *services.PrintJobService
|
||||
}
|
||||
|
||||
// NewPrintJobHandler creates a PrintJobHandler with the given service.
|
||||
func NewPrintJobHandler(service *services.PrintJobService) *PrintJobHandler {
|
||||
return &PrintJobHandler{service: service}
|
||||
}
|
||||
|
||||
// List handles GET /api/print-jobs — returns paginated, filtered print jobs.
|
||||
func (h *PrintJobHandler) List(w http.ResponseWriter, r *http.Request) {
|
||||
limit, offset := parsePagination(r)
|
||||
filter := repositories.PrintJobFilter{
|
||||
Status: r.URL.Query().Get("status"),
|
||||
Limit: limit,
|
||||
Offset: offset,
|
||||
}
|
||||
|
||||
if pidStr := r.URL.Query().Get("printer_id"); pidStr != "" {
|
||||
pid, err := strconv.Atoi(pidStr)
|
||||
if err != nil {
|
||||
writeJSON(w, http.StatusBadRequest, dtos.ErrorResponse{
|
||||
Error: "invalid printer_id",
|
||||
Code: http.StatusBadRequest,
|
||||
})
|
||||
return
|
||||
}
|
||||
filter.PrinterID = &pid
|
||||
}
|
||||
|
||||
jobs, total, err := h.service.List(r.Context(), filter)
|
||||
if err != nil {
|
||||
slog.Error("failed to list print jobs", "error", err)
|
||||
writeJSON(w, http.StatusInternalServerError, dtos.ErrorResponse{
|
||||
Error: "internal server error",
|
||||
Code: http.StatusInternalServerError,
|
||||
})
|
||||
return
|
||||
}
|
||||
|
||||
writeJSON(w, http.StatusOK, dtos.ListResponse{
|
||||
Data: jobs,
|
||||
Total: total,
|
||||
Limit: limit,
|
||||
Offset: offset,
|
||||
})
|
||||
}
|
||||
@@ -1,34 +0,0 @@
|
||||
package handlers
|
||||
|
||||
import (
|
||||
"log/slog"
|
||||
"net/http"
|
||||
|
||||
"github.com/CubeCraft-Creations/Extrudex/backend/internal/dtos"
|
||||
"github.com/CubeCraft-Creations/Extrudex/backend/internal/services"
|
||||
)
|
||||
|
||||
// PrinterHandler handles HTTP requests for printer listings.
|
||||
type PrinterHandler struct {
|
||||
service *services.PrinterService
|
||||
}
|
||||
|
||||
// NewPrinterHandler creates a PrinterHandler with the given service.
|
||||
func NewPrinterHandler(service *services.PrinterService) *PrinterHandler {
|
||||
return &PrinterHandler{service: service}
|
||||
}
|
||||
|
||||
// List handles GET /api/printers — returns all printers with printer_type info.
|
||||
func (h *PrinterHandler) List(w http.ResponseWriter, r *http.Request) {
|
||||
printers, err := h.service.List(r.Context())
|
||||
if err != nil {
|
||||
slog.Error("failed to list printers", "error", err)
|
||||
writeJSON(w, http.StatusInternalServerError, dtos.ErrorResponse{
|
||||
Error: "internal server error",
|
||||
Code: http.StatusInternalServerError,
|
||||
})
|
||||
return
|
||||
}
|
||||
|
||||
writeJSON(w, http.StatusOK, dtos.SingleResponse{Data: printers})
|
||||
}
|
||||
@@ -1,70 +0,0 @@
|
||||
package handlers
|
||||
|
||||
import (
|
||||
"log/slog"
|
||||
"net/http"
|
||||
"strconv"
|
||||
|
||||
"github.com/CubeCraft-Creations/Extrudex/backend/internal/dtos"
|
||||
"github.com/CubeCraft-Creations/Extrudex/backend/internal/repositories"
|
||||
)
|
||||
|
||||
// UsageLogHandler handles HTTP requests for usage log operations.
|
||||
type UsageLogHandler struct {
|
||||
repo *repositories.UsageLogRepository
|
||||
}
|
||||
|
||||
// NewUsageLogHandler creates a UsageLogHandler with the given repository.
|
||||
func NewUsageLogHandler(repo *repositories.UsageLogRepository) *UsageLogHandler {
|
||||
return &UsageLogHandler{repo: repo}
|
||||
}
|
||||
|
||||
// List handles GET /api/usage-logs — returns paginated, filtered usage logs.
|
||||
func (h *UsageLogHandler) List(w http.ResponseWriter, r *http.Request) {
|
||||
limit, offset := parsePagination(r)
|
||||
filter := repositories.UsageLogFilter{
|
||||
Limit: limit,
|
||||
Offset: offset,
|
||||
}
|
||||
|
||||
if sidStr := r.URL.Query().Get("spool_id"); sidStr != "" {
|
||||
sid, err := strconv.Atoi(sidStr)
|
||||
if err != nil {
|
||||
writeJSON(w, http.StatusBadRequest, dtos.ErrorResponse{
|
||||
Error: "invalid spool_id",
|
||||
Code: http.StatusBadRequest,
|
||||
})
|
||||
return
|
||||
}
|
||||
filter.SpoolID = &sid
|
||||
}
|
||||
|
||||
if jidStr := r.URL.Query().Get("job_id"); jidStr != "" {
|
||||
jid, err := strconv.Atoi(jidStr)
|
||||
if err != nil {
|
||||
writeJSON(w, http.StatusBadRequest, dtos.ErrorResponse{
|
||||
Error: "invalid job_id",
|
||||
Code: http.StatusBadRequest,
|
||||
})
|
||||
return
|
||||
}
|
||||
filter.JobID = &jid
|
||||
}
|
||||
|
||||
logs, total, err := h.repo.GetAll(r.Context(), filter)
|
||||
if err != nil {
|
||||
slog.Error("failed to list usage logs", "error", err)
|
||||
writeJSON(w, http.StatusInternalServerError, dtos.ErrorResponse{
|
||||
Error: "internal server error",
|
||||
Code: http.StatusInternalServerError,
|
||||
})
|
||||
return
|
||||
}
|
||||
|
||||
writeJSON(w, http.StatusOK, dtos.ListResponse{
|
||||
Data: logs,
|
||||
Total: total,
|
||||
Limit: limit,
|
||||
Offset: offset,
|
||||
})
|
||||
}
|
||||
@@ -1,162 +0,0 @@
|
||||
// Package models defines the Extrudex domain model structs.
|
||||
// These map 1:1 to PostgreSQL tables with snake_case JSON serialization.
|
||||
// Nullable fields use pointer types; all timestamps are time.Time.
|
||||
package models
|
||||
|
||||
import "time"
|
||||
|
||||
// ============================================================================
// Lookup Tables
// ============================================================================

// PrinterType represents a printer technology category (fdm, resin, etc.).
type PrinterType struct {
	ID        int       `json:"id"`
	Name      string    `json:"name"` // category name, e.g. "fdm"
	CreatedAt time.Time `json:"created_at"`
	UpdatedAt time.Time `json:"updated_at"`
}

// JobStatus represents a print job lifecycle state.
type JobStatus struct {
	ID        int       `json:"id"`
	Name      string    `json:"name"` // state name, e.g. "queued", "printing"
	CreatedAt time.Time `json:"created_at"`
	UpdatedAt time.Time `json:"updated_at"`
}

// MaterialBase represents a base material type (PLA, PETG, ABS, etc.).
// Density and temperature ranges are stored here for grams-calculation and slicing guidance.
type MaterialBase struct {
	ID               int       `json:"id"`
	Name             string    `json:"name"`
	DensityGCm3      float64   `json:"density_g_cm3"` // grams per cubic centimetre
	ExtrusionTempMin *int      `json:"extrusion_temp_min,omitempty"`
	ExtrusionTempMax *int      `json:"extrusion_temp_max,omitempty"`
	BedTempMin       *int      `json:"bed_temp_min,omitempty"`
	BedTempMax       *int      `json:"bed_temp_max,omitempty"`
	CreatedAt        time.Time `json:"created_at"`
	UpdatedAt        time.Time `json:"updated_at"`
}

// MaterialFinish represents the visual/texture finish (Basic, Silk, Matte, etc.).
type MaterialFinish struct {
	ID          int       `json:"id"`
	Name        string    `json:"name"`
	Description *string   `json:"description,omitempty"`
	CreatedAt   time.Time `json:"created_at"`
	UpdatedAt   time.Time `json:"updated_at"`
}

// MaterialModifier represents an additive property (Carbon Fiber, Wood-Filled, etc.).
type MaterialModifier struct {
	ID          int       `json:"id"`
	Name        string    `json:"name"`
	Description *string   `json:"description,omitempty"`
	CreatedAt   time.Time `json:"created_at"`
	UpdatedAt   time.Time `json:"updated_at"`
}
|
||||
|
||||
// ============================================================================
// Core Entity Tables
// ============================================================================

// Printer represents a 3D printer in the fleet.
// Connectivity is optional and dual-protocol: Moonraker (HTTP) fields and
// MQTT fields are both nullable; a printer may use either, both, or neither.
type Printer struct {
	ID              int          `json:"id"`
	Name            string       `json:"name"`
	PrinterTypeID   int          `json:"printer_type_id"`
	PrinterType     *PrinterType `json:"printer_type,omitempty"` // populated on JOIN queries
	Manufacturer    *string      `json:"manufacturer,omitempty"`
	Model           *string      `json:"model,omitempty"`
	MoonrakerURL    *string      `json:"moonraker_url,omitempty"`
	MoonrakerAPIKey *string      `json:"moonraker_api_key,omitempty"`
	MQTTBrokerHost  *string      `json:"mqtt_broker_host,omitempty"`
	MQTTTopicPrefix *string      `json:"mqtt_topic_prefix,omitempty"`
	MQTTTLSEnabled  bool         `json:"mqtt_tls_enabled"`
	IsActive        bool         `json:"is_active"`
	CreatedAt       time.Time    `json:"created_at"`
	UpdatedAt       time.Time    `json:"updated_at"`
}
|
||||
|
||||
// FilamentSpool represents a physical filament spool in inventory.
// material_finish_id defaults to 1 ("Basic"); material_modifier_id is optional.
// Grams are always physically measured values — grams_used is derived, not stored.
// A non-nil DeletedAt marks the spool as soft-deleted.
type FilamentSpool struct {
	ID                     int               `json:"id"`
	Name                   string            `json:"name"`
	MaterialBaseID         int               `json:"material_base_id"`
	MaterialBase           *MaterialBase     `json:"material_base,omitempty"` // JOIN
	MaterialFinishID       int               `json:"material_finish_id"`
	MaterialFinish         *MaterialFinish   `json:"material_finish,omitempty"` // JOIN
	MaterialModifierID     *int              `json:"material_modifier_id,omitempty"`
	MaterialModifier       *MaterialModifier `json:"material_modifier,omitempty"` // JOIN
	ColorHex               string            `json:"color_hex"`
	Brand                  *string           `json:"brand,omitempty"`
	DiameterMM             float64           `json:"diameter_mm"`
	InitialGrams           int               `json:"initial_grams"`
	RemainingGrams         int               `json:"remaining_grams"`
	SpoolWeightGrams       *int              `json:"spool_weight_grams,omitempty"` // empty-spool tare weight
	CostUSD                *float64          `json:"cost_usd,omitempty"`
	LowStockThresholdGrams int               `json:"low_stock_threshold_grams"`
	Notes                  *string           `json:"notes,omitempty"`
	Barcode                *string           `json:"barcode,omitempty"`
	DeletedAt              *time.Time        `json:"deleted_at,omitempty"` // soft-delete marker
	CreatedAt              time.Time         `json:"created_at"`
	UpdatedAt              time.Time         `json:"updated_at"`
}
|
||||
|
||||
// PrintJob represents a single print on a specific printer.
// The filament_spool_id is a convenience reference; multi-spool jobs track usage in usage_logs.
// A non-nil DeletedAt marks the job as soft-deleted.
type PrintJob struct {
	ID                       int            `json:"id"`
	PrinterID                int            `json:"printer_id"`
	Printer                  *Printer       `json:"printer,omitempty"` // JOIN
	FilamentSpoolID          *int           `json:"filament_spool_id,omitempty"`
	FilamentSpool            *FilamentSpool `json:"filament_spool,omitempty"` // JOIN
	JobName                  string         `json:"job_name"`
	FileName                 *string        `json:"file_name,omitempty"`
	JobStatusID              int            `json:"job_status_id"`
	JobStatus                *JobStatus     `json:"job_status,omitempty"` // JOIN
	StartedAt                *time.Time     `json:"started_at,omitempty"`
	CompletedAt              *time.Time     `json:"completed_at,omitempty"`
	DurationSeconds          *int           `json:"duration_seconds,omitempty"`
	EstimatedDurationSeconds *int           `json:"estimated_duration_seconds,omitempty"`
	TotalMMExtruded          *float64       `json:"total_mm_extruded,omitempty"`
	TotalGramsUsed           *float64       `json:"total_grams_used,omitempty"`
	TotalCostUSD             *float64       `json:"total_cost_usd,omitempty"`
	Notes                    *string        `json:"notes,omitempty"`
	DeletedAt                *time.Time     `json:"deleted_at,omitempty"` // soft-delete marker
	CreatedAt                time.Time      `json:"created_at"`
	UpdatedAt                time.Time      `json:"updated_at"`
}
|
||||
|
||||
// UsageLog records filament consumption for a specific spool during a print job.
// This is the atomic unit of filament tracking — grams are derived from mm_extruded.
// Rows are append-only: there is no UpdatedAt or soft-delete column.
type UsageLog struct {
	ID              int            `json:"id"`
	PrintJobID      int            `json:"print_job_id"`
	PrintJob        *PrintJob      `json:"print_job,omitempty"` // JOIN
	FilamentSpoolID int            `json:"filament_spool_id"`
	FilamentSpool   *FilamentSpool `json:"filament_spool,omitempty"` // JOIN
	MMExtruded      float64        `json:"mm_extruded"`
	GramsUsed       float64        `json:"grams_used"` // derived from MMExtruded and material density
	CostUSD         *float64       `json:"cost_usd,omitempty"`
	LoggedAt        time.Time      `json:"logged_at"`
	CreatedAt       time.Time      `json:"created_at"`
}
|
||||
|
||||
// ============================================================================
// Application Settings
// ============================================================================

// Setting represents a key-value application configuration entry.
// The value is stored as JSONB in PostgreSQL, allowing flexible typed config.
type Setting struct {
	ID          int       `json:"id"`
	Key         string    `json:"key"`   // unique lookup key
	Value       []byte    `json:"value"` // raw JSON — marshalled/unmarshalled by caller
	Description *string   `json:"description,omitempty"`
	CreatedAt   time.Time `json:"created_at"`
	UpdatedAt   time.Time `json:"updated_at"`
}
|
||||
@@ -1,285 +0,0 @@
|
||||
package repositories
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/CubeCraft-Creations/Extrudex/backend/internal/models"
|
||||
"github.com/jackc/pgx/v5/pgxpool"
|
||||
)
|
||||
|
||||
// FilamentRepository handles database queries for filament_spools.
// All read methods exclude soft-deleted rows (deleted_at IS NOT NULL).
type FilamentRepository struct {
	pool *pgxpool.Pool // shared connection pool; owned by the caller
}

// NewFilamentRepository creates a FilamentRepository backed by the given pool.
func NewFilamentRepository(pool *pgxpool.Pool) *FilamentRepository {
	return &FilamentRepository{pool: pool}
}
|
||||
|
||||
// FilamentFilter holds query parameters for listing filament spools.
// Zero values ("" / false) mean "no filter"; all active filters are ANDed.
// Limit and Offset are passed straight to the SQL LIMIT/OFFSET clause.
type FilamentFilter struct {
	Material string // filter by material_base name (case-insensitive)
	Finish   string // filter by material_finish name (case-insensitive)
	Color    string // filter by exact color_hex match
	LowStock bool   // if true, filter for remaining_grams <= low_stock_threshold_grams
	Limit    int    // max rows to return
	Offset   int    // rows to skip
}
|
||||
|
||||
// spoolScanFields is the common SELECT column list for filament spools with JOINs.
// COALESCE guards cover the LEFT JOINed lookup tables, so non-pointer model
// fields never receive SQL NULL; nullable columns are passed through as-is.
// (The former COALESCE(x, NULL::int) wrappers were identities and are removed.)
//
// The column order here must stay in lockstep with scanSpoolWithJoins.
const spoolScanFields = `
	s.id, s.name,
	s.material_base_id,
	COALESCE(mb.name, '') as material_base_name,
	COALESCE(mb.density_g_cm3, 0) as material_base_density_g_cm3,
	mb.extrusion_temp_min as material_base_extrusion_temp_min,
	mb.extrusion_temp_max as material_base_extrusion_temp_max,
	mb.bed_temp_min as material_base_bed_temp_min,
	mb.bed_temp_max as material_base_bed_temp_max,
	COALESCE(mb.created_at, s.created_at) as material_base_created_at,
	COALESCE(mb.updated_at, s.created_at) as material_base_updated_at,
	s.material_finish_id,
	COALESCE(mf.name, '') as material_finish_name,
	mf.description as material_finish_description,
	COALESCE(mf.created_at, s.created_at) as material_finish_created_at,
	COALESCE(mf.updated_at, s.created_at) as material_finish_updated_at,
	s.material_modifier_id,
	mm.name as material_modifier_name,
	mm.description as material_modifier_description,
	mm.created_at as material_modifier_created_at,
	mm.updated_at as material_modifier_updated_at,
	s.color_hex, s.brand, s.diameter_mm,
	s.initial_grams, s.remaining_grams, s.spool_weight_grams,
	s.cost_usd, s.low_stock_threshold_grams,
	s.notes, s.barcode,
	s.deleted_at, s.created_at, s.updated_at`

// spoolFromJoins is the shared FROM/JOIN clause paired with spoolScanFields.
const spoolFromJoins = `
	FROM filament_spools s
	LEFT JOIN material_bases mb ON s.material_base_id = mb.id
	LEFT JOIN material_finishes mf ON s.material_finish_id = mf.id
	LEFT JOIN material_modifiers mm ON s.material_modifier_id = mm.id`
|
||||
|
||||
// scanSpoolWithJoins scans a full spool row including all JOINed tables.
//
// The positional Scan arguments must stay in lockstep with the column order
// of spoolScanFields. The anonymous Scan interface lets this accept both a
// single row and a rows iterator. Because the material lookup tables are
// LEFT JOINed, the optional modifier columns arrive as NULLable and are read
// into local pointer temporaries before being folded into the model.
func scanSpoolWithJoins(row interface{ Scan(...interface{}) error }) (models.FilamentSpool, error) {
	var s models.FilamentSpool
	var mb models.MaterialBase   // filled from JOINed material_bases columns
	var mf models.MaterialFinish // filled from JOINed material_finishes columns
	var mfDesc *string           // nullable finish description
	var modifierID *int          // nil when the spool has no modifier
	var modName, modDesc *string
	var modCreatedAt, modUpdatedAt *time.Time

	err := row.Scan(
		&s.ID, &s.Name,
		&s.MaterialBaseID,
		&mb.Name, &mb.DensityGCm3,
		&mb.ExtrusionTempMin, &mb.ExtrusionTempMax,
		&mb.BedTempMin, &mb.BedTempMax,
		&mb.CreatedAt, &mb.UpdatedAt,
		&s.MaterialFinishID,
		&mf.Name, &mfDesc,
		&mf.CreatedAt, &mf.UpdatedAt,
		&modifierID,
		&modName, &modDesc,
		&modCreatedAt, &modUpdatedAt,
		&s.ColorHex, &s.Brand, &s.DiameterMM,
		&s.InitialGrams, &s.RemainingGrams, &s.SpoolWeightGrams,
		&s.CostUSD, &s.LowStockThresholdGrams,
		&s.Notes, &s.Barcode,
		&s.DeletedAt, &s.CreatedAt, &s.UpdatedAt,
	)
	if err != nil {
		return s, err
	}

	// The lookup-table IDs are not part of the JOINed column list; copy them
	// from the spool's FK columns so the embedded structs carry their own IDs.
	mb.ID = s.MaterialBaseID
	s.MaterialBase = &mb

	mf.ID = s.MaterialFinishID
	if mfDesc != nil {
		mf.Description = mfDesc
	}
	s.MaterialFinish = &mf

	// The modifier is optional: attach it only when both the FK and the
	// JOINed name are present.
	s.MaterialModifierID = modifierID
	if modifierID != nil && modName != nil {
		mm := models.MaterialModifier{
			ID:   *modifierID,
			Name: *modName,
		}
		if modDesc != nil {
			mm.Description = modDesc
		}
		if modCreatedAt != nil {
			mm.CreatedAt = *modCreatedAt
		}
		if modUpdatedAt != nil {
			mm.UpdatedAt = *modUpdatedAt
		}
		s.MaterialModifier = &mm
	}

	return s, nil
}
|
||||
|
||||
// GetAll returns filament spools matching the given filters, with pagination.
|
||||
// Returns results, total matching count, and any error.
|
||||
func (r *FilamentRepository) GetAll(ctx context.Context, filter FilamentFilter) ([]models.FilamentSpool, int, error) {
|
||||
conditions := []string{"s.deleted_at IS NULL"}
|
||||
args := []interface{}{}
|
||||
argIdx := 1
|
||||
|
||||
if filter.Material != "" {
|
||||
conditions = append(conditions, fmt.Sprintf("LOWER(mb.name) = LOWER($%d)", argIdx))
|
||||
args = append(args, filter.Material)
|
||||
argIdx++
|
||||
}
|
||||
if filter.Finish != "" {
|
||||
conditions = append(conditions, fmt.Sprintf("LOWER(mf.name) = LOWER($%d)", argIdx))
|
||||
args = append(args, filter.Finish)
|
||||
argIdx++
|
||||
}
|
||||
if filter.Color != "" {
|
||||
conditions = append(conditions, fmt.Sprintf("s.color_hex = $%d", argIdx))
|
||||
args = append(args, filter.Color)
|
||||
argIdx++
|
||||
}
|
||||
if filter.LowStock {
|
||||
conditions = append(conditions, "s.remaining_grams <= s.low_stock_threshold_grams")
|
||||
}
|
||||
|
||||
whereClause := ""
|
||||
if len(conditions) > 0 {
|
||||
whereClause = "WHERE " + strings.Join(conditions, " AND ")
|
||||
}
|
||||
|
||||
// Count total.
|
||||
var total int
|
||||
countQuery := "SELECT COUNT(*) " + spoolFromJoins + " " + whereClause
|
||||
if err := r.pool.QueryRow(ctx, countQuery, args...).Scan(&total); err != nil {
|
||||
return nil, 0, err
|
||||
}
|
||||
|
||||
// Query with pagination.
|
||||
dataQuery := "SELECT " + spoolScanFields + " " + spoolFromJoins + " " +
|
||||
whereClause +
|
||||
" ORDER BY s.name ASC" +
|
||||
fmt.Sprintf(" LIMIT $%d OFFSET $%d", argIdx, argIdx+1)
|
||||
|
||||
dataArgs := make([]interface{}, len(args))
|
||||
copy(dataArgs, args)
|
||||
dataArgs = append(dataArgs, filter.Limit, filter.Offset)
|
||||
|
||||
rows, err := r.pool.Query(ctx, dataQuery, dataArgs...)
|
||||
if err != nil {
|
||||
return nil, 0, err
|
||||
}
|
||||
defer rows.Close()
|
||||
|
||||
var spools []models.FilamentSpool
|
||||
for rows.Next() {
|
||||
s, err := scanSpoolWithJoins(rows)
|
||||
if err != nil {
|
||||
return nil, 0, err
|
||||
}
|
||||
spools = append(spools, s)
|
||||
}
|
||||
if err := rows.Err(); err != nil {
|
||||
return nil, 0, err
|
||||
}
|
||||
if spools == nil {
|
||||
spools = []models.FilamentSpool{}
|
||||
}
|
||||
|
||||
return spools, total, nil
|
||||
}
|
||||
|
||||
// GetByID returns a single filament spool by ID with JOINed data.
//
// NOTE(review): a missing or soft-deleted row does NOT yield (nil, nil) —
// QueryRow's deferred error surfaces from Scan (presumably pgx's
// "no rows" error; confirm against pgx v5 docs), and that error is
// returned to the caller. Callers must treat it as not-found.
func (r *FilamentRepository) GetByID(ctx context.Context, id int) (*models.FilamentSpool, error) {
	query := "SELECT " + spoolScanFields + " " + spoolFromJoins +
		" WHERE s.id = $1 AND s.deleted_at IS NULL"

	row := r.pool.QueryRow(ctx, query, id)
	s, err := scanSpoolWithJoins(row)
	if err != nil {
		return nil, err
	}
	return &s, nil
}
|
||||
|
||||
// Create inserts a new filament spool and returns the created spool with JOINed data.
//
// Only the listed columns are written; timestamps and any other defaults are
// supplied by the database. The insert captures the new ID, then the spool is
// re-read via GetByID so the returned value carries the JOINed lookup data.
func (r *FilamentRepository) Create(ctx context.Context, spool *models.FilamentSpool) (*models.FilamentSpool, error) {
	var id int
	err := r.pool.QueryRow(ctx, `
		INSERT INTO filament_spools (
			name, material_base_id, material_finish_id, material_modifier_id,
			color_hex, brand, diameter_mm, initial_grams, remaining_grams,
			spool_weight_grams, cost_usd, low_stock_threshold_grams,
			notes, barcode
		) VALUES ($1,$2,$3,$4,$5,$6,$7,$8,$9,$10,$11,$12,$13,$14)
		RETURNING id
	`,
		spool.Name, spool.MaterialBaseID, spool.MaterialFinishID, spool.MaterialModifierID,
		spool.ColorHex, spool.Brand, spool.DiameterMM, spool.InitialGrams, spool.RemainingGrams,
		spool.SpoolWeightGrams, spool.CostUSD, spool.LowStockThresholdGrams,
		spool.Notes, spool.Barcode,
	).Scan(&id)
	if err != nil {
		return nil, err
	}

	return r.GetByID(ctx, id)
}
|
||||
|
||||
// Update applies partial updates to an existing filament spool.
//
// NOTE(review): every entry in the updates map is applied verbatim — nil
// values are NOT skipped (they become SQL NULL), contrary to the original
// "only non-nil fields" claim. Map iteration order is random, so the SET
// clause order varies between calls (harmless to semantics).
//
// SECURITY NOTE(review): map keys are interpolated directly into the SQL as
// column names. Callers must supply only trusted, whitelisted column names;
// never pass user-controlled keys.
//
// Returns (nil, nil) when the spool does not exist or is soft-deleted.
func (r *FilamentRepository) Update(ctx context.Context, id int, updates map[string]interface{}) (*models.FilamentSpool, error) {
	// Nothing to change — return the current row unchanged.
	if len(updates) == 0 {
		return r.GetByID(ctx, id)
	}

	setClauses := []string{"updated_at = NOW()"}
	args := []interface{}{}
	argIdx := 1

	for col, val := range updates {
		setClauses = append(setClauses, fmt.Sprintf("%s = $%d", col, argIdx))
		args = append(args, val)
		argIdx++
	}

	// The id is the final bind parameter, after all SET values.
	args = append(args, id)
	query := fmt.Sprintf("UPDATE filament_spools SET %s WHERE id = $%d AND deleted_at IS NULL",
		strings.Join(setClauses, ", "), argIdx)

	result, err := r.pool.Exec(ctx, query, args...)
	if err != nil {
		return nil, err
	}
	if result.RowsAffected() == 0 {
		return nil, nil // not found or deleted
	}

	// Re-read so the caller gets the fresh row with JOINed lookup data.
	return r.GetByID(ctx, id)
}
|
||||
|
||||
// SoftDelete marks a filament spool as deleted by setting deleted_at = NOW().
|
||||
// Returns true if a row was affected.
|
||||
func (r *FilamentRepository) SoftDelete(ctx context.Context, id int) (bool, error) {
|
||||
result, err := r.pool.Exec(ctx, `
|
||||
UPDATE filament_spools
|
||||
SET deleted_at = NOW(), updated_at = NOW()
|
||||
WHERE id = $1 AND deleted_at IS NULL
|
||||
`, id)
|
||||
if err != nil {
|
||||
return false, err
|
||||
}
|
||||
return result.RowsAffected() > 0, nil
|
||||
}
|
||||
@@ -1,54 +0,0 @@
|
||||
// Package repositories provides data access logic backed by PostgreSQL via pgxpool.
|
||||
package repositories
|
||||
|
||||
import (
|
||||
"context"
|
||||
|
||||
"github.com/CubeCraft-Creations/Extrudex/backend/internal/models"
|
||||
"github.com/jackc/pgx/v5/pgxpool"
|
||||
)
|
||||
|
||||
// MaterialRepository handles database queries for material lookup tables.
type MaterialRepository struct {
	pool *pgxpool.Pool // shared connection pool; owned by the caller
}

// NewMaterialRepository creates a MaterialRepository backed by the given pool.
func NewMaterialRepository(pool *pgxpool.Pool) *MaterialRepository {
	return &MaterialRepository{pool: pool}
}
|
||||
|
||||
// GetAll returns all material bases ordered by name.
|
||||
func (r *MaterialRepository) GetAll(ctx context.Context) ([]models.MaterialBase, error) {
|
||||
rows, err := r.pool.Query(ctx, `
|
||||
SELECT id, name, density_g_cm3, extrusion_temp_min, extrusion_temp_max,
|
||||
bed_temp_min, bed_temp_max, created_at, updated_at
|
||||
FROM material_bases
|
||||
ORDER BY name
|
||||
`)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer rows.Close()
|
||||
|
||||
var materials []models.MaterialBase
|
||||
for rows.Next() {
|
||||
var m models.MaterialBase
|
||||
if err := rows.Scan(
|
||||
&m.ID, &m.Name, &m.DensityGCm3,
|
||||
&m.ExtrusionTempMin, &m.ExtrusionTempMax,
|
||||
&m.BedTempMin, &m.BedTempMax,
|
||||
&m.CreatedAt, &m.UpdatedAt,
|
||||
); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
materials = append(materials, m)
|
||||
}
|
||||
if err := rows.Err(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if materials == nil {
|
||||
materials = []models.MaterialBase{}
|
||||
}
|
||||
return materials, nil
|
||||
}
|
||||
@@ -1,157 +0,0 @@
|
||||
package repositories
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"strings"
|
||||
|
||||
"github.com/CubeCraft-Creations/Extrudex/backend/internal/models"
|
||||
"github.com/jackc/pgx/v5/pgxpool"
|
||||
)
|
||||
|
||||
// PrintJobRepository handles database queries for print_jobs.
// Read methods exclude soft-deleted rows (deleted_at IS NOT NULL).
type PrintJobRepository struct {
	pool *pgxpool.Pool // shared connection pool; owned by the caller
}

// NewPrintJobRepository creates a PrintJobRepository backed by the given pool.
func NewPrintJobRepository(pool *pgxpool.Pool) *PrintJobRepository {
	return &PrintJobRepository{pool: pool}
}
|
||||
|
||||
// PrintJobFilter holds query parameters for listing print jobs.
// Zero values ("" / nil) mean "no filter"; active filters are ANDed.
// Limit and Offset are passed straight to the SQL LIMIT/OFFSET clause.
type PrintJobFilter struct {
	Status    string // filter by job_status name (case-insensitive)
	PrinterID *int   // filter by printer_id
	Limit     int    // max rows to return
	Offset    int    // rows to skip
}
|
||||
|
||||
// scanPrintJobWithJoins scans a print_job row with JOINed tables.
//
// The positional Scan arguments must stay in lockstep with the SELECT column
// lists in GetAll and GetByID. The anonymous Scan interface lets this accept
// both a single row and a rows iterator. Only job_status is JOINed here;
// Printer and FilamentSpool stay nil.
func (r *PrintJobRepository) scanPrintJobWithJoins(row interface{ Scan(...interface{}) error }) (models.PrintJob, error) {
	var pj models.PrintJob
	var js models.JobStatus // filled from JOINed job_statuses columns

	err := row.Scan(
		&pj.ID, &pj.PrinterID, &pj.FilamentSpoolID,
		&pj.JobName, &pj.FileName,
		&pj.JobStatusID,
		&pj.StartedAt, &pj.CompletedAt,
		&pj.DurationSeconds, &pj.EstimatedDurationSeconds,
		&pj.TotalMMExtruded, &pj.TotalGramsUsed, &pj.TotalCostUSD,
		&pj.Notes,
		&pj.DeletedAt, &pj.CreatedAt, &pj.UpdatedAt,
		&js.ID, &js.Name,
		&js.CreatedAt, &js.UpdatedAt,
	)
	if err != nil {
		return pj, err
	}

	pj.JobStatus = &js
	return pj, nil
}
|
||||
|
||||
// GetAll returns print jobs matching the given filters, with pagination.
|
||||
func (r *PrintJobRepository) GetAll(ctx context.Context, filter PrintJobFilter) ([]models.PrintJob, int, error) {
|
||||
conditions := []string{"pj.deleted_at IS NULL"}
|
||||
args := []interface{}{}
|
||||
argIdx := 1
|
||||
|
||||
if filter.Status != "" {
|
||||
conditions = append(conditions, fmt.Sprintf("LOWER(js.name) = LOWER($%d)", argIdx))
|
||||
args = append(args, filter.Status)
|
||||
argIdx++
|
||||
}
|
||||
if filter.PrinterID != nil {
|
||||
conditions = append(conditions, fmt.Sprintf("pj.printer_id = $%d", argIdx))
|
||||
args = append(args, *filter.PrinterID)
|
||||
argIdx++
|
||||
}
|
||||
|
||||
whereClause := ""
|
||||
if len(conditions) > 0 {
|
||||
whereClause = "WHERE " + strings.Join(conditions, " AND ")
|
||||
}
|
||||
|
||||
// Count.
|
||||
var total int
|
||||
countQuery := `SELECT COUNT(*)
|
||||
FROM print_jobs pj
|
||||
LEFT JOIN job_statuses js ON pj.job_status_id = js.id
|
||||
` + " " + whereClause
|
||||
if err := r.pool.QueryRow(ctx, countQuery, args...).Scan(&total); err != nil {
|
||||
return nil, 0, err
|
||||
}
|
||||
|
||||
// Query with pagination.
|
||||
dataQuery := `SELECT
|
||||
pj.id, pj.printer_id, pj.filament_spool_id,
|
||||
pj.job_name, pj.file_name,
|
||||
pj.job_status_id,
|
||||
pj.started_at, pj.completed_at,
|
||||
pj.duration_seconds, pj.estimated_duration_seconds,
|
||||
pj.total_mm_extruded, pj.total_grams_used, pj.total_cost_usd,
|
||||
pj.notes,
|
||||
pj.deleted_at, pj.created_at, pj.updated_at,
|
||||
js.id, js.name,
|
||||
js.created_at, js.updated_at
|
||||
FROM print_jobs pj
|
||||
LEFT JOIN job_statuses js ON pj.job_status_id = js.id
|
||||
` + whereClause +
|
||||
" ORDER BY pj.created_at DESC" +
|
||||
fmt.Sprintf(" LIMIT $%d OFFSET $%d", argIdx, argIdx+1)
|
||||
|
||||
dataArgs := make([]interface{}, len(args))
|
||||
copy(dataArgs, args)
|
||||
dataArgs = append(dataArgs, filter.Limit, filter.Offset)
|
||||
|
||||
rows, err := r.pool.Query(ctx, dataQuery, dataArgs...)
|
||||
if err != nil {
|
||||
return nil, 0, err
|
||||
}
|
||||
defer rows.Close()
|
||||
|
||||
var jobs []models.PrintJob
|
||||
for rows.Next() {
|
||||
pj, err := r.scanPrintJobWithJoins(rows)
|
||||
if err != nil {
|
||||
return nil, 0, err
|
||||
}
|
||||
jobs = append(jobs, pj)
|
||||
}
|
||||
if err := rows.Err(); err != nil {
|
||||
return nil, 0, err
|
||||
}
|
||||
if jobs == nil {
|
||||
jobs = []models.PrintJob{}
|
||||
}
|
||||
|
||||
return jobs, total, nil
|
||||
}
|
||||
|
||||
// GetByID returns a single print job by ID with JOINed job_status.
//
// NOTE(review): a missing or soft-deleted row surfaces as the scan error
// from QueryRow (presumably pgx's "no rows" error; confirm against pgx v5
// docs) — this function never returns (nil, nil).
// The column order must stay in lockstep with scanPrintJobWithJoins.
func (r *PrintJobRepository) GetByID(ctx context.Context, id int) (*models.PrintJob, error) {
	row := r.pool.QueryRow(ctx, `
		SELECT
		pj.id, pj.printer_id, pj.filament_spool_id,
		pj.job_name, pj.file_name,
		pj.job_status_id,
		pj.started_at, pj.completed_at,
		pj.duration_seconds, pj.estimated_duration_seconds,
		pj.total_mm_extruded, pj.total_grams_used, pj.total_cost_usd,
		pj.notes,
		pj.deleted_at, pj.created_at, pj.updated_at,
		js.id, js.name,
		js.created_at, js.updated_at
		FROM print_jobs pj
		LEFT JOIN job_statuses js ON pj.job_status_id = js.id
		WHERE pj.id = $1 AND pj.deleted_at IS NULL
	`, id)

	pj, err := r.scanPrintJobWithJoins(row)
	if err != nil {
		return nil, err
	}
	return &pj, nil
}
|
||||
@@ -1,78 +0,0 @@
|
||||
package repositories
|
||||
|
||||
import (
|
||||
"context"
|
||||
|
||||
"github.com/CubeCraft-Creations/Extrudex/backend/internal/models"
|
||||
"github.com/jackc/pgx/v5/pgxpool"
|
||||
)
|
||||
|
||||
// PrinterRepository handles database queries for printers.
type PrinterRepository struct {
	pool *pgxpool.Pool // shared connection pool; owned by the caller
}

// NewPrinterRepository creates a PrinterRepository backed by the given pool.
func NewPrinterRepository(pool *pgxpool.Pool) *PrinterRepository {
	return &PrinterRepository{pool: pool}
}
|
||||
|
||||
// scanPrinterWithType scans a printer row with JOINed printer_type.
//
// The positional Scan arguments must stay in lockstep with the SELECT column
// list in GetAll. The anonymous Scan interface lets this accept both a
// single row and a rows iterator.
func (r *PrinterRepository) scanPrinterWithType(row interface{ Scan(...interface{}) error }) (models.Printer, error) {
	var p models.Printer
	var pt models.PrinterType // filled from JOINed printer_types columns

	err := row.Scan(
		&p.ID, &p.Name, &p.PrinterTypeID,
		&p.Manufacturer, &p.Model,
		&p.MoonrakerURL, &p.MoonrakerAPIKey,
		&p.MQTTBrokerHost, &p.MQTTTopicPrefix,
		&p.MQTTTLSEnabled, &p.IsActive,
		&p.CreatedAt, &p.UpdatedAt,
		&pt.ID, &pt.Name,
		&pt.CreatedAt, &pt.UpdatedAt,
	)
	if err != nil {
		return p, err
	}

	p.PrinterType = &pt
	return p, nil
}
|
||||
|
||||
// GetAll returns all printers joined with their printer_type, ordered by name.
|
||||
func (r *PrinterRepository) GetAll(ctx context.Context) ([]models.Printer, error) {
|
||||
rows, err := r.pool.Query(ctx, `
|
||||
SELECT p.id, p.name, p.printer_type_id,
|
||||
p.manufacturer, p.model,
|
||||
p.moonraker_url, p.moonraker_api_key,
|
||||
p.mqtt_broker_host, p.mqtt_topic_prefix,
|
||||
p.mqtt_tls_enabled, p.is_active,
|
||||
p.created_at, p.updated_at,
|
||||
pt.id, pt.name,
|
||||
pt.created_at, pt.updated_at
|
||||
FROM printers p
|
||||
JOIN printer_types pt ON p.printer_type_id = pt.id
|
||||
ORDER BY p.name
|
||||
`)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer rows.Close()
|
||||
|
||||
var printers []models.Printer
|
||||
for rows.Next() {
|
||||
p, err := r.scanPrinterWithType(rows)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
printers = append(printers, p)
|
||||
}
|
||||
if err := rows.Err(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if printers == nil {
|
||||
printers = []models.Printer{}
|
||||
}
|
||||
return printers, nil
|
||||
}
|
||||
@@ -1,96 +0,0 @@
|
||||
package repositories
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
|
||||
"github.com/CubeCraft-Creations/Extrudex/backend/internal/models"
|
||||
"github.com/jackc/pgx/v5/pgxpool"
|
||||
)
|
||||
|
||||
// UsageLogRepository handles database queries for usage_logs.
type UsageLogRepository struct {
	pool *pgxpool.Pool // shared connection pool; owned by the caller
}

// NewUsageLogRepository creates a UsageLogRepository backed by the given pool.
func NewUsageLogRepository(pool *pgxpool.Pool) *UsageLogRepository {
	return &UsageLogRepository{pool: pool}
}
|
||||
|
||||
// UsageLogFilter holds query parameters for listing usage logs.
// Nil pointers mean "no filter"; active filters are ANDed.
// Limit and Offset are passed straight to the SQL LIMIT/OFFSET clause.
type UsageLogFilter struct {
	SpoolID *int // filter by filament_spool_id
	JobID   *int // filter by print_job_id
	Limit   int  // max rows to return
	Offset  int  // rows to skip
}
|
||||
|
||||
// GetAll returns usage logs matching the given filters, with pagination.
|
||||
func (r *UsageLogRepository) GetAll(ctx context.Context, filter UsageLogFilter) ([]models.UsageLog, int, error) {
|
||||
conditions := []string{"1=1"}
|
||||
args := []interface{}{}
|
||||
argIdx := 1
|
||||
|
||||
if filter.SpoolID != nil {
|
||||
conditions = append(conditions, fmt.Sprintf("ul.filament_spool_id = $%d", argIdx))
|
||||
args = append(args, *filter.SpoolID)
|
||||
argIdx++
|
||||
}
|
||||
if filter.JobID != nil {
|
||||
conditions = append(conditions, fmt.Sprintf("ul.print_job_id = $%d", argIdx))
|
||||
args = append(args, *filter.JobID)
|
||||
argIdx++
|
||||
}
|
||||
|
||||
whereClause := "WHERE " + fmt.Sprintf("%s", conditions[0])
|
||||
for _, c := range conditions[1:] {
|
||||
whereClause += " AND " + c
|
||||
}
|
||||
|
||||
// Count.
|
||||
var total int
|
||||
countQuery := "SELECT COUNT(*) FROM usage_logs ul " + whereClause
|
||||
if err := r.pool.QueryRow(ctx, countQuery, args...).Scan(&total); err != nil {
|
||||
return nil, 0, err
|
||||
}
|
||||
|
||||
// Query with pagination.
|
||||
dataQuery := `SELECT id, print_job_id, filament_spool_id, mm_extruded,
|
||||
grams_used, cost_usd, logged_at, created_at
|
||||
FROM usage_logs ul
|
||||
` + whereClause +
|
||||
" ORDER BY ul.logged_at DESC" +
|
||||
fmt.Sprintf(" LIMIT $%d OFFSET $%d", argIdx, argIdx+1)
|
||||
|
||||
dataArgs := make([]interface{}, len(args))
|
||||
copy(dataArgs, args)
|
||||
dataArgs = append(dataArgs, filter.Limit, filter.Offset)
|
||||
|
||||
rows, err := r.pool.Query(ctx, dataQuery, dataArgs...)
|
||||
if err != nil {
|
||||
return nil, 0, err
|
||||
}
|
||||
defer rows.Close()
|
||||
|
||||
var logs []models.UsageLog
|
||||
for rows.Next() {
|
||||
var l models.UsageLog
|
||||
if err := rows.Scan(
|
||||
&l.ID, &l.PrintJobID, &l.FilamentSpoolID,
|
||||
&l.MMExtruded, &l.GramsUsed, &l.CostUSD,
|
||||
&l.LoggedAt, &l.CreatedAt,
|
||||
); err != nil {
|
||||
return nil, 0, err
|
||||
}
|
||||
logs = append(logs, l)
|
||||
}
|
||||
if err := rows.Err(); err != nil {
|
||||
return nil, 0, err
|
||||
}
|
||||
if logs == nil {
|
||||
logs = []models.UsageLog{}
|
||||
}
|
||||
|
||||
return logs, total, nil
|
||||
}
|
||||
@@ -1,84 +0,0 @@
|
||||
package router
|
||||
|
||||
import (
|
||||
"net/http"
|
||||
"time"
|
||||
|
||||
"github.com/CubeCraft-Creations/Extrudex/backend/internal/config"
|
||||
"github.com/CubeCraft-Creations/Extrudex/backend/internal/handlers"
|
||||
"github.com/CubeCraft-Creations/Extrudex/backend/internal/repositories"
|
||||
"github.com/CubeCraft-Creations/Extrudex/backend/internal/services"
|
||||
"github.com/go-chi/chi/v5"
|
||||
"github.com/go-chi/chi/v5/middleware"
|
||||
"github.com/jackc/pgx/v5/pgxpool"
|
||||
)
|
||||
|
||||
// New creates and configures a Chi router with all middleware and handlers mounted.
|
||||
func New(cfg *config.Config, dbPool *pgxpool.Pool) chi.Router {
|
||||
r := chi.NewRouter()
|
||||
|
||||
// Middleware
|
||||
r.Use(middleware.RequestID)
|
||||
r.Use(middleware.RealIP)
|
||||
r.Use(middleware.Logger)
|
||||
r.Use(middleware.Recoverer)
|
||||
r.Use(middleware.Timeout(60 * time.Second))
|
||||
|
||||
// CORS
|
||||
r.Use(func(next http.Handler) http.Handler {
|
||||
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||
w.Header().Set("Access-Control-Allow-Origin", cfg.CorsOrigin)
|
||||
w.Header().Set("Access-Control-Allow-Methods", "GET, POST, PUT, DELETE, OPTIONS")
|
||||
w.Header().Set("Access-Control-Allow-Headers", "Content-Type, Authorization")
|
||||
if r.Method == http.MethodOptions {
|
||||
w.WriteHeader(http.StatusNoContent)
|
||||
return
|
||||
}
|
||||
next.ServeHTTP(w, r)
|
||||
})
|
||||
})
|
||||
|
||||
// Health check
|
||||
healthHandler := handlers.NewHealthHandler(dbPool)
|
||||
r.Get("/health", healthHandler.ServeHTTP)
|
||||
|
||||
// ── Repositories ──────────────────────────────────────────────────────
|
||||
materialRepo := repositories.NewMaterialRepository(dbPool)
|
||||
filamentRepo := repositories.NewFilamentRepository(dbPool)
|
||||
printerRepo := repositories.NewPrinterRepository(dbPool)
|
||||
printJobRepo := repositories.NewPrintJobRepository(dbPool)
|
||||
usageLogRepo := repositories.NewUsageLogRepository(dbPool)
|
||||
|
||||
// ── Services ──────────────────────────────────────────────────────────
|
||||
filamentService := services.NewFilamentService(filamentRepo)
|
||||
printerService := services.NewPrinterService(printerRepo)
|
||||
printJobService := services.NewPrintJobService(printJobRepo)
|
||||
|
||||
// ── Handlers ──────────────────────────────────────────────────────────
|
||||
materialHandler := handlers.NewMaterialHandler(materialRepo)
|
||||
filamentHandler := handlers.NewFilamentHandler(filamentService)
|
||||
printerHandler := handlers.NewPrinterHandler(printerService)
|
||||
printJobHandler := handlers.NewPrintJobHandler(printJobService)
|
||||
usageLogHandler := handlers.NewUsageLogHandler(usageLogRepo)
|
||||
|
||||
// ── API Routes ────────────────────────────────────────────────────────
|
||||
r.Route("/api", func(r chi.Router) {
|
||||
r.Get("/materials", materialHandler.List)
|
||||
|
||||
r.Route("/filaments", func(r chi.Router) {
|
||||
r.Get("/", filamentHandler.List)
|
||||
r.Post("/", filamentHandler.Create)
|
||||
r.Route("/{id}", func(r chi.Router) {
|
||||
r.Get("/", filamentHandler.Get)
|
||||
r.Put("/", filamentHandler.Update)
|
||||
r.Delete("/", filamentHandler.Delete)
|
||||
})
|
||||
})
|
||||
|
||||
r.Get("/printers", printerHandler.List)
|
||||
r.Get("/print-jobs", printJobHandler.List)
|
||||
r.Get("/usage-logs", usageLogHandler.List)
|
||||
})
|
||||
|
||||
return r
|
||||
}
|
||||
@@ -1,82 +0,0 @@
|
||||
// Package services contains business logic and application services.
|
||||
package services
|
||||
|
||||
import (
|
||||
"context"
|
||||
|
||||
"github.com/CubeCraft-Creations/Extrudex/backend/internal/models"
|
||||
"github.com/CubeCraft-Creations/Extrudex/backend/internal/repositories"
|
||||
)
|
||||
|
||||
// FilamentService wraps FilamentRepository with business logic and validation.
|
||||
type FilamentService struct {
|
||||
repo *repositories.FilamentRepository
|
||||
}
|
||||
|
||||
// NewFilamentService creates a FilamentService backed by the given repository.
|
||||
func NewFilamentService(repo *repositories.FilamentRepository) *FilamentService {
|
||||
return &FilamentService{repo: repo}
|
||||
}
|
||||
|
||||
// List returns paginated filament spools filtered by the given criteria.
|
||||
func (s *FilamentService) List(ctx context.Context, filter repositories.FilamentFilter) ([]models.FilamentSpool, int, error) {
|
||||
return s.repo.GetAll(ctx, filter)
|
||||
}
|
||||
|
||||
// GetByID returns a single filament spool by ID.
|
||||
func (s *FilamentService) GetByID(ctx context.Context, id int) (*models.FilamentSpool, error) {
|
||||
return s.repo.GetByID(ctx, id)
|
||||
}
|
||||
|
||||
// Create validates and creates a new filament spool.
|
||||
func (s *FilamentService) Create(ctx context.Context, spool *models.FilamentSpool) (*models.FilamentSpool, error) {
|
||||
if err := validateFilamentSpool(spool); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return s.repo.Create(ctx, spool)
|
||||
}
|
||||
|
||||
// Update applies partial updates to a filament spool after validation.
|
||||
func (s *FilamentService) Update(ctx context.Context, id int, updates map[string]interface{}) (*models.FilamentSpool, error) {
|
||||
return s.repo.Update(ctx, id, updates)
|
||||
}
|
||||
|
||||
// SoftDelete marks a filament spool as deleted.
|
||||
func (s *FilamentService) SoftDelete(ctx context.Context, id int) (bool, error) {
|
||||
return s.repo.SoftDelete(ctx, id)
|
||||
}
|
||||
|
||||
// PrinterService wraps PrinterRepository.
|
||||
type PrinterService struct {
|
||||
repo *repositories.PrinterRepository
|
||||
}
|
||||
|
||||
// NewPrinterService creates a PrinterService backed by the given repository.
|
||||
func NewPrinterService(repo *repositories.PrinterRepository) *PrinterService {
|
||||
return &PrinterService{repo: repo}
|
||||
}
|
||||
|
||||
// List returns all printers.
|
||||
func (s *PrinterService) List(ctx context.Context) ([]models.Printer, error) {
|
||||
return s.repo.GetAll(ctx)
|
||||
}
|
||||
|
||||
// PrintJobService wraps PrintJobRepository.
|
||||
type PrintJobService struct {
|
||||
repo *repositories.PrintJobRepository
|
||||
}
|
||||
|
||||
// NewPrintJobService creates a PrintJobService backed by the given repository.
|
||||
func NewPrintJobService(repo *repositories.PrintJobRepository) *PrintJobService {
|
||||
return &PrintJobService{repo: repo}
|
||||
}
|
||||
|
||||
// List returns paginated print jobs filtered by the given criteria.
|
||||
func (s *PrintJobService) List(ctx context.Context, filter repositories.PrintJobFilter) ([]models.PrintJob, int, error) {
|
||||
return s.repo.GetAll(ctx, filter)
|
||||
}
|
||||
|
||||
// GetByID returns a single print job by ID.
|
||||
func (s *PrintJobService) GetByID(ctx context.Context, id int) (*models.PrintJob, error) {
|
||||
return s.repo.GetByID(ctx, id)
|
||||
}
|
||||
@@ -1,74 +0,0 @@
|
||||
package services
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"fmt"
|
||||
"regexp"
|
||||
|
||||
"github.com/CubeCraft-Creations/Extrudex/backend/internal/dtos"
|
||||
"github.com/CubeCraft-Creations/Extrudex/backend/internal/models"
|
||||
)
|
||||
|
||||
// colorHexPattern validates hex color strings like #FF0000 or #ff0000.
|
||||
var colorHexPattern = regexp.MustCompile(`^#[0-9A-Fa-f]{6}$`)
|
||||
|
||||
// validateFilamentSpool performs validation on a FilamentSpool entity.
|
||||
// Returns a descriptive error on failure.
|
||||
func validateFilamentSpool(s *models.FilamentSpool) error {
|
||||
if s.Name == "" {
|
||||
return errors.New("name is required")
|
||||
}
|
||||
if s.MaterialBaseID <= 0 {
|
||||
return errors.New("material_base_id is required")
|
||||
}
|
||||
if s.MaterialFinishID <= 0 {
|
||||
return errors.New("material_finish_id is required")
|
||||
}
|
||||
if !colorHexPattern.MatchString(s.ColorHex) {
|
||||
return fmt.Errorf("color_hex must be a valid hex color (e.g., #FF0000)")
|
||||
}
|
||||
if s.InitialGrams <= 0 {
|
||||
return errors.New("initial_grams must be greater than 0")
|
||||
}
|
||||
if s.RemainingGrams < 0 {
|
||||
return errors.New("remaining_grams must be >= 0")
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// ValidateCreateFilamentRequest validates a creation DTO.
|
||||
func ValidateCreateFilamentRequest(req dtos.CreateFilamentRequest) error {
|
||||
if req.Name == "" {
|
||||
return errors.New("name is required")
|
||||
}
|
||||
if req.MaterialBaseID <= 0 {
|
||||
return errors.New("material_base_id is required")
|
||||
}
|
||||
if req.MaterialFinishID <= 0 {
|
||||
return errors.New("material_finish_id is required")
|
||||
}
|
||||
if !colorHexPattern.MatchString(req.ColorHex) {
|
||||
return fmt.Errorf("color_hex must be a valid hex color (e.g., #FF0000)")
|
||||
}
|
||||
if req.InitialGrams <= 0 {
|
||||
return errors.New("initial_grams must be greater than 0")
|
||||
}
|
||||
if req.RemainingGrams < 0 {
|
||||
return errors.New("remaining_grams must be >= 0")
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// ValidateUpdateFilamentRequest validates partial update fields.
|
||||
func ValidateUpdateFilamentRequest(req dtos.UpdateFilamentRequest) error {
|
||||
if req.ColorHex != nil && !colorHexPattern.MatchString(*req.ColorHex) {
|
||||
return fmt.Errorf("color_hex must be a valid hex color (e.g., #FF0000)")
|
||||
}
|
||||
if req.InitialGrams != nil && *req.InitialGrams <= 0 {
|
||||
return errors.New("initial_grams must be greater than 0")
|
||||
}
|
||||
if req.RemainingGrams != nil && *req.RemainingGrams < 0 {
|
||||
return errors.New("remaining_grams must be >= 0")
|
||||
}
|
||||
return nil
|
||||
}
|
||||
@@ -1,19 +0,0 @@
|
||||
-- Migration: 000001_initial_schema (rollback)
|
||||
-- Description: Drop all tables and indexes created in the initial schema migration
|
||||
-- Author: Hex
|
||||
-- Date: 2026-05-06
|
||||
|
||||
BEGIN;
|
||||
|
||||
DROP TABLE IF EXISTS usage_logs CASCADE;
|
||||
DROP TABLE IF EXISTS print_jobs CASCADE;
|
||||
DROP TABLE IF EXISTS filament_spools CASCADE;
|
||||
DROP TABLE IF EXISTS printers CASCADE;
|
||||
DROP TABLE IF EXISTS settings CASCADE;
|
||||
DROP TABLE IF EXISTS material_modifiers CASCADE;
|
||||
DROP TABLE IF EXISTS material_finishes CASCADE;
|
||||
DROP TABLE IF EXISTS material_bases CASCADE;
|
||||
DROP TABLE IF EXISTS job_statuses CASCADE;
|
||||
DROP TABLE IF EXISTS printer_types CASCADE;
|
||||
|
||||
COMMIT;
|
||||
@@ -1,231 +0,0 @@
|
||||
-- Migration: 000001_initial_schema
|
||||
-- Description: Create initial Extrudex schema — lookup tables, core entities, and settings
|
||||
-- Author: Hex
|
||||
-- Date: 2026-05-06
|
||||
--
|
||||
-- Design decisions:
|
||||
-- - Lookup tables for material_base, material_finish, material_modifier (no free-text enums)
|
||||
-- - Lookup tables for printer_type and job_status (extensible, no hard-coded enum values)
|
||||
-- - FK ON DELETE: RESTRICT on critical parents (material_base, material_finish, printer),
|
||||
-- SET NULL on optional parents (modifier, spool on print_jobs),
|
||||
-- CASCADE for usage_logs when parent job is deleted
|
||||
-- - Soft-delete (deleted_at) on spools and print_jobs for safety
|
||||
-- - JSONB config column on settings for flexible app-wide configuration
|
||||
-- - All identifiers snake_case per project convention
|
||||
|
||||
BEGIN;
|
||||
|
||||
-- ============================================================================
|
||||
-- Lookup Tables
|
||||
-- ============================================================================
|
||||
|
||||
-- Printer types (fdm, resin, etc.) — extensible, not a raw enum
|
||||
CREATE TABLE printer_types (
|
||||
id SERIAL PRIMARY KEY,
|
||||
name VARCHAR(50) NOT NULL UNIQUE,
|
||||
created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
|
||||
updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
|
||||
);
|
||||
|
||||
-- Job statuses (pending, printing, paused, completed, failed, cancelled)
|
||||
CREATE TABLE job_statuses (
|
||||
id SERIAL PRIMARY KEY,
|
||||
name VARCHAR(50) NOT NULL UNIQUE,
|
||||
created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
|
||||
updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
|
||||
);
|
||||
|
||||
-- Material base types (PLA, PETG, ABS, TPU, ASA, Nylon, PC)
|
||||
CREATE TABLE material_bases (
|
||||
id SERIAL PRIMARY KEY,
|
||||
name VARCHAR(100) NOT NULL UNIQUE,
|
||||
density_g_cm3 DECIMAL(5,3) NOT NULL,
|
||||
extrusion_temp_min INT,
|
||||
extrusion_temp_max INT,
|
||||
bed_temp_min INT,
|
||||
bed_temp_max INT,
|
||||
created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
|
||||
updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
|
||||
);
|
||||
|
||||
-- Material finishes (Basic, Silk, Matte, Glossy, Satin)
|
||||
CREATE TABLE material_finishes (
|
||||
id SERIAL PRIMARY KEY,
|
||||
name VARCHAR(100) NOT NULL UNIQUE,
|
||||
description TEXT,
|
||||
created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
|
||||
updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
|
||||
);
|
||||
|
||||
-- Material modifiers (Wood-Filled, Carbon Fiber, Glow-in-Dark, Marble)
|
||||
CREATE TABLE material_modifiers (
|
||||
id SERIAL PRIMARY KEY,
|
||||
name VARCHAR(100) NOT NULL UNIQUE,
|
||||
description TEXT,
|
||||
created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
|
||||
updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
|
||||
);
|
||||
|
||||
-- ============================================================================
|
||||
-- Core Entity Tables
|
||||
-- ============================================================================
|
||||
|
||||
-- 3D printers in the fleet
|
||||
CREATE TABLE printers (
|
||||
id SERIAL PRIMARY KEY,
|
||||
name VARCHAR(255) NOT NULL,
|
||||
printer_type_id INT NOT NULL,
|
||||
manufacturer VARCHAR(255),
|
||||
model VARCHAR(255),
|
||||
moonraker_url VARCHAR(512),
|
||||
moonraker_api_key VARCHAR(512),
|
||||
mqtt_broker_host VARCHAR(255),
|
||||
mqtt_topic_prefix VARCHAR(255),
|
||||
mqtt_tls_enabled BOOLEAN NOT NULL DEFAULT FALSE,
|
||||
is_active BOOLEAN NOT NULL DEFAULT TRUE,
|
||||
created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
|
||||
updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
|
||||
|
||||
CONSTRAINT fk_printers_printer_type
|
||||
FOREIGN KEY (printer_type_id) REFERENCES printer_types(id)
|
||||
ON DELETE RESTRICT
|
||||
);
|
||||
|
||||
-- Filament spools — the core inventory item
|
||||
CREATE TABLE filament_spools (
|
||||
id SERIAL PRIMARY KEY,
|
||||
name VARCHAR(255) NOT NULL,
|
||||
material_base_id INT NOT NULL,
|
||||
material_finish_id INT NOT NULL DEFAULT 1, -- "Basic" (seed data populates this first)
|
||||
material_modifier_id INT,
|
||||
color_hex VARCHAR(7) NOT NULL CHECK (color_hex ~ '^#[0-9A-Fa-f]{6}$'),
|
||||
brand VARCHAR(255),
|
||||
diameter_mm DECIMAL(4,2) NOT NULL DEFAULT 1.75,
|
||||
initial_grams INT NOT NULL CHECK (initial_grams > 0),
|
||||
remaining_grams INT NOT NULL CHECK (remaining_grams >= 0),
|
||||
spool_weight_grams INT, -- measured empty-spool weight (tare), nullable
|
||||
cost_usd DECIMAL(10,2),
|
||||
low_stock_threshold_grams INT NOT NULL DEFAULT 50,
|
||||
notes TEXT,
|
||||
barcode VARCHAR(255) UNIQUE,
|
||||
deleted_at TIMESTAMPTZ,
|
||||
created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
|
||||
updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
|
||||
|
||||
CONSTRAINT fk_spools_material_base
|
||||
FOREIGN KEY (material_base_id) REFERENCES material_bases(id)
|
||||
ON DELETE RESTRICT,
|
||||
|
||||
CONSTRAINT fk_spools_material_finish
|
||||
FOREIGN KEY (material_finish_id) REFERENCES material_finishes(id)
|
||||
ON DELETE RESTRICT,
|
||||
|
||||
CONSTRAINT fk_spools_material_modifier
|
||||
FOREIGN KEY (material_modifier_id) REFERENCES material_modifiers(id)
|
||||
ON DELETE SET NULL
|
||||
);
|
||||
|
||||
-- Print jobs — each job is one print on one printer
|
||||
CREATE TABLE print_jobs (
|
||||
id SERIAL PRIMARY KEY,
|
||||
printer_id INT NOT NULL,
|
||||
filament_spool_id INT, -- nullable: a job may use multiple spools (captured in usage_logs)
|
||||
job_name VARCHAR(255) NOT NULL,
|
||||
file_name VARCHAR(512),
|
||||
job_status_id INT NOT NULL DEFAULT 1, -- "pending"
|
||||
started_at TIMESTAMPTZ,
|
||||
completed_at TIMESTAMPTZ,
|
||||
duration_seconds INT,
|
||||
estimated_duration_seconds INT,
|
||||
total_mm_extruded DECIMAL(12,2),
|
||||
total_grams_used DECIMAL(10,2),
|
||||
total_cost_usd DECIMAL(10,4),
|
||||
notes TEXT,
|
||||
deleted_at TIMESTAMPTZ,
|
||||
created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
|
||||
updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
|
||||
|
||||
CONSTRAINT fk_print_jobs_printer
|
||||
FOREIGN KEY (printer_id) REFERENCES printers(id)
|
||||
ON DELETE RESTRICT,
|
||||
|
||||
CONSTRAINT fk_print_jobs_spool
|
||||
FOREIGN KEY (filament_spool_id) REFERENCES filament_spools(id)
|
||||
ON DELETE SET NULL,
|
||||
|
||||
CONSTRAINT fk_print_jobs_status
|
||||
FOREIGN KEY (job_status_id) REFERENCES job_statuses(id)
|
||||
ON DELETE RESTRICT
|
||||
);
|
||||
|
||||
-- Usage logs — granular tracking of filament consumed per job, per spool
|
||||
CREATE TABLE usage_logs (
|
||||
id SERIAL PRIMARY KEY,
|
||||
print_job_id INT NOT NULL,
|
||||
filament_spool_id INT NOT NULL,
|
||||
mm_extruded DECIMAL(12,2) NOT NULL CHECK (mm_extruded > 0),
|
||||
grams_used DECIMAL(10,2) NOT NULL CHECK (grams_used > 0),
|
||||
cost_usd DECIMAL(10,4),
|
||||
logged_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
|
||||
created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
|
||||
|
||||
CONSTRAINT fk_usage_logs_print_job
|
||||
FOREIGN KEY (print_job_id) REFERENCES print_jobs(id)
|
||||
ON DELETE CASCADE,
|
||||
|
||||
CONSTRAINT fk_usage_logs_spool
|
||||
FOREIGN KEY (filament_spool_id) REFERENCES filament_spools(id)
|
||||
ON DELETE RESTRICT
|
||||
);
|
||||
|
||||
-- ============================================================================
|
||||
-- Application Settings
|
||||
-- ============================================================================
|
||||
|
||||
CREATE TABLE settings (
|
||||
id SERIAL PRIMARY KEY,
|
||||
key VARCHAR(255) NOT NULL UNIQUE,
|
||||
value JSONB NOT NULL,
|
||||
description TEXT,
|
||||
created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
|
||||
updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
|
||||
);
|
||||
|
||||
-- ============================================================================
|
||||
-- Indexes
|
||||
-- ============================================================================
|
||||
|
||||
-- Filament spools — query patterns: lookup by material, low-stock scans, barcode scans
|
||||
CREATE INDEX ix_spools_material_base_id ON filament_spools(material_base_id);
|
||||
CREATE INDEX ix_spools_material_finish_id ON filament_spools(material_finish_id);
|
||||
CREATE INDEX ix_spools_material_modifier_id ON filament_spools(material_modifier_id);
|
||||
CREATE INDEX ix_spools_remaining_grams ON filament_spools(remaining_grams)
|
||||
WHERE deleted_at IS NULL; -- partial index: only active spools for low-stock queries
|
||||
CREATE INDEX ix_spools_barcode ON filament_spools(barcode)
|
||||
WHERE barcode IS NOT NULL AND deleted_at IS NULL;
|
||||
CREATE INDEX ix_spools_deleted_at ON filament_spools(deleted_at)
|
||||
WHERE deleted_at IS NOT NULL; -- small index for soft-delete filtering
|
||||
|
||||
-- Printers
|
||||
CREATE INDEX ix_printers_printer_type_id ON printers(printer_type_id);
|
||||
CREATE INDEX ix_printers_is_active ON printers(is_active)
|
||||
WHERE is_active = TRUE; -- partial index for fleet dashboard queries
|
||||
|
||||
-- Print jobs — query by printer, status, date range, and soft-delete filter
|
||||
CREATE INDEX ix_print_jobs_printer_id ON print_jobs(printer_id);
|
||||
CREATE INDEX ix_print_jobs_spool_id ON print_jobs(filament_spool_id)
|
||||
WHERE filament_spool_id IS NOT NULL;
|
||||
CREATE INDEX ix_print_jobs_status_id ON print_jobs(job_status_id);
|
||||
CREATE INDEX ix_print_jobs_created_at ON print_jobs(created_at DESC);
|
||||
CREATE INDEX ix_print_jobs_deleted_at ON print_jobs(deleted_at)
|
||||
WHERE deleted_at IS NOT NULL;
|
||||
|
||||
-- Usage logs — always queried by job or spool
|
||||
CREATE INDEX ix_usage_logs_print_job_id ON usage_logs(print_job_id);
|
||||
CREATE INDEX ix_usage_logs_spool_id ON usage_logs(filament_spool_id);
|
||||
CREATE INDEX ix_usage_logs_logged_at ON usage_logs(logged_at DESC);
|
||||
|
||||
-- Settings — key lookups
|
||||
CREATE INDEX ix_settings_key ON settings(key);
|
||||
|
||||
COMMIT;
|
||||
@@ -1,15 +0,0 @@
|
||||
-- Migration: 000002_seed_data (rollback)
|
||||
-- Description: Remove seed data inserted in 000002
|
||||
-- Author: Hex
|
||||
-- Date: 2026-05-06
|
||||
|
||||
BEGIN;
|
||||
|
||||
DELETE FROM settings WHERE key IN ('default_low_stock_threshold_grams', 'default_diameter_mm', 'filament_cross_section_area_mm2');
|
||||
DELETE FROM material_modifiers WHERE id IN (1, 2, 3, 4);
|
||||
DELETE FROM material_finishes WHERE id IN (1, 2, 3, 4, 5);
|
||||
DELETE FROM material_bases WHERE id IN (1, 2, 3, 4, 5, 6, 7);
|
||||
DELETE FROM job_statuses WHERE id IN (1, 2, 3, 4, 5, 6);
|
||||
DELETE FROM printer_types WHERE id IN (1, 2);
|
||||
|
||||
COMMIT;
|
||||
@@ -1,95 +0,0 @@
|
||||
-- Seed Data: Extrudex common reference data
|
||||
-- Author: Hex
|
||||
-- Date: 2026-05-06
|
||||
--
|
||||
-- IMPORTANT: IDs are explicitly assigned to satisfy the DEFAULT constraints:
|
||||
-- - filament_spools.material_finish_id DEFAULT 1 ("Basic")
|
||||
-- - print_jobs.job_status_id DEFAULT 1 ("pending")
|
||||
--
|
||||
-- Density values sourced from common manufacturer specifications.
|
||||
-- Temperature ranges are conservative/typical; users can override per-spool.
|
||||
|
||||
BEGIN;
|
||||
|
||||
-- ============================================================================
|
||||
-- Printer Types
|
||||
-- ============================================================================
|
||||
|
||||
INSERT INTO printer_types (id, name) VALUES
|
||||
(1, 'fdm'),
|
||||
(2, 'resin')
|
||||
ON CONFLICT (id) DO NOTHING;
|
||||
|
||||
-- Reset the sequence so future inserts start after our explicit IDs
|
||||
SELECT setval('printer_types_id_seq', GREATEST(2, (SELECT MAX(id) FROM printer_types)));
|
||||
|
||||
-- ============================================================================
|
||||
-- Job Statuses
|
||||
-- ============================================================================
|
||||
|
||||
INSERT INTO job_statuses (id, name) VALUES
|
||||
(1, 'pending'),
|
||||
(2, 'printing'),
|
||||
(3, 'paused'),
|
||||
(4, 'completed'),
|
||||
(5, 'failed'),
|
||||
(6, 'cancelled')
|
||||
ON CONFLICT (id) DO NOTHING;
|
||||
|
||||
SELECT setval('job_statuses_id_seq', GREATEST(6, (SELECT MAX(id) FROM job_statuses)));
|
||||
|
||||
-- ============================================================================
|
||||
-- Material Bases (common filament types)
|
||||
-- ============================================================================
|
||||
|
||||
INSERT INTO material_bases (id, name, density_g_cm3, extrusion_temp_min, extrusion_temp_max, bed_temp_min, bed_temp_max) VALUES
|
||||
(1, 'PLA', 1.24, 190, 220, 0, 60),
|
||||
(2, 'PETG', 1.27, 230, 250, 70, 90),
|
||||
(3, 'ABS', 1.04, 230, 260, 90, 110),
|
||||
(4, 'TPU', 1.21, 220, 250, 0, 60),
|
||||
(5, 'ASA', 1.07, 240, 260, 90, 110),
|
||||
(6, 'Nylon', 1.14, 240, 280, 70, 100),
|
||||
(7, 'PC', 1.20, 260, 310, 90, 120)
|
||||
ON CONFLICT (id) DO NOTHING;
|
||||
|
||||
SELECT setval('material_bases_id_seq', GREATEST(7, (SELECT MAX(id) FROM material_bases)));
|
||||
|
||||
-- ============================================================================
|
||||
-- Material Finishes
|
||||
-- ============================================================================
|
||||
-- ID 1 = "Basic" is the default for new spools (DEFAULT 1 constraint)
|
||||
|
||||
INSERT INTO material_finishes (id, name, description) VALUES
|
||||
(1, 'Basic', 'Standard solid-color filament with no special finish'),
|
||||
(2, 'Silk', 'Glossy silk-like sheen, often used for decorative prints'),
|
||||
(3, 'Matte', 'Flat non-reflective surface finish'),
|
||||
(4, 'Glossy', 'High-shine reflective surface'),
|
||||
(5, 'Satin', 'Semi-gloss between matte and glossy')
|
||||
ON CONFLICT (id) DO NOTHING;
|
||||
|
||||
SELECT setval('material_finishes_id_seq', GREATEST(5, (SELECT MAX(id) FROM material_finishes)));
|
||||
|
||||
-- ============================================================================
|
||||
-- Material Modifiers
|
||||
-- ============================================================================
|
||||
|
||||
INSERT INTO material_modifiers (id, name, description) VALUES
|
||||
(1, 'Wood-Filled', 'Contains wood fibers for natural wood-like appearance and texture'),
|
||||
(2, 'Carbon Fiber', 'Reinforced with carbon fibers for increased stiffness and strength'),
|
||||
(3, 'Glow-in-Dark', 'Phosphorescent additive that glows after exposure to light'),
|
||||
(4, 'Marble', 'Contains specks for a stone-like marble appearance')
|
||||
ON CONFLICT (id) DO NOTHING;
|
||||
|
||||
SELECT setval('material_modifiers_id_seq', GREATEST(4, (SELECT MAX(id) FROM material_modifiers)));
|
||||
|
||||
-- ============================================================================
|
||||
-- Default Application Settings
|
||||
-- ============================================================================
|
||||
|
||||
INSERT INTO settings (key, value, description) VALUES
|
||||
('default_low_stock_threshold_grams', '50', 'Default grams threshold for low-stock alerts on new spools'),
|
||||
('default_diameter_mm', '1.75', 'Default filament diameter for new spools (1.75mm is the modern standard)'),
|
||||
('filament_cross_section_area_mm2', '2.405', 'Cross-sectional area for 1.75mm filament: π × (1.75/2)²')
|
||||
ON CONFLICT (key) DO NOTHING;
|
||||
|
||||
COMMIT;
|
||||
44
frontend/.gitignore
vendored
Normal file
44
frontend/.gitignore
vendored
Normal file
@@ -0,0 +1,44 @@
|
||||
# See https://docs.github.com/get-started/getting-started-with-git/ignoring-files for more about ignoring files.
|
||||
|
||||
# Compiled output
|
||||
/dist
|
||||
/tmp
|
||||
/out-tsc
|
||||
/bazel-out
|
||||
|
||||
# Node
|
||||
/node_modules
|
||||
npm-debug.log
|
||||
yarn-error.log
|
||||
|
||||
# IDEs and editors
|
||||
.idea/
|
||||
.project
|
||||
.classpath
|
||||
.c9/
|
||||
*.launch
|
||||
.settings/
|
||||
*.sublime-workspace
|
||||
|
||||
# Visual Studio Code
|
||||
.vscode/*
|
||||
!.vscode/settings.json
|
||||
!.vscode/tasks.json
|
||||
!.vscode/launch.json
|
||||
!.vscode/extensions.json
|
||||
!.vscode/mcp.json
|
||||
.history/*
|
||||
|
||||
# Miscellaneous
|
||||
/.angular/cache
|
||||
.sass-cache/
|
||||
/connect.lock
|
||||
/coverage
|
||||
/libpeerconnection.log
|
||||
testem.log
|
||||
/typings
|
||||
__screenshots__/
|
||||
|
||||
# System files
|
||||
.DS_Store
|
||||
Thumbs.db
|
||||
@@ -1,14 +1,20 @@
|
||||
# Build stage
|
||||
# Multi-stage build for production
|
||||
FROM node:22-alpine AS builder
|
||||
|
||||
WORKDIR /app
|
||||
|
||||
COPY package*.json ./
|
||||
RUN npm ci
|
||||
|
||||
COPY . .
|
||||
RUN npm run build
|
||||
|
||||
# Serve stage
|
||||
# Production stage — serve with nginx
|
||||
FROM nginx:alpine
|
||||
|
||||
COPY --from=builder /app/dist /usr/share/nginx/html
|
||||
COPY nginx.conf /etc/nginx/conf.d/default.conf
|
||||
|
||||
EXPOSE 80
|
||||
|
||||
CMD ["nginx", "-g", "daemon off;"]
|
||||
|
||||
@@ -10,7 +10,7 @@ export default tseslint.config(
|
||||
extends: [js.configs.recommended, ...tseslint.configs.recommended],
|
||||
files: ['**/*.{ts,tsx}'],
|
||||
languageOptions: {
|
||||
ecmaVersion: 2020,
|
||||
ecmaVersion: 2023,
|
||||
globals: globals.browser,
|
||||
},
|
||||
plugins: {
|
||||
|
||||
@@ -4,6 +4,7 @@
|
||||
<meta charset="UTF-8" />
|
||||
<link rel="icon" type="image/svg+xml" href="/vite.svg" />
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
|
||||
<meta name="theme-color" content="#0f172a" />
|
||||
<title>Extrudex</title>
|
||||
</head>
|
||||
<body>
|
||||
|
||||
@@ -1,23 +1,16 @@
|
||||
server {
|
||||
listen 80;
|
||||
server_name _;
|
||||
|
||||
root /usr/share/nginx/html;
|
||||
index index.html;
|
||||
listen 80;
|
||||
listen [::]:80;
|
||||
server_name localhost;
|
||||
|
||||
location / {
|
||||
root /usr/share/nginx/html;
|
||||
index index.html;
|
||||
try_files $uri $uri/ /index.html;
|
||||
}
|
||||
|
||||
location /api/ {
|
||||
proxy_pass http://backend:8080/api/;
|
||||
proxy_http_version 1.1;
|
||||
proxy_set_header Upgrade $http_upgrade;
|
||||
proxy_set_header Connection 'upgrade';
|
||||
proxy_set_header Host $host;
|
||||
proxy_cache_bypass $http_upgrade;
|
||||
proxy_set_header X-Real-IP $remote_addr;
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
proxy_set_header X-Forwarded-Proto $scheme;
|
||||
error_page 500 502 503 504 /50x.html;
|
||||
location = /50x.html {
|
||||
root /usr/share/nginx/html;
|
||||
}
|
||||
}
|
||||
|
||||
2042
frontend/package-lock.json
generated
2042
frontend/package-lock.json
generated
File diff suppressed because it is too large
Load Diff
@@ -1,35 +1,36 @@
|
||||
{
|
||||
"name": "extrudex-frontend",
|
||||
"private": true,
|
||||
"version": "0.0.1",
|
||||
"version": "0.0.0",
|
||||
"type": "module",
|
||||
"scripts": {
|
||||
"dev": "vite",
|
||||
"build": "tsc && vite build",
|
||||
"lint": "eslint . --ext ts,tsx --report-unused-disable-directives --max-warnings 0",
|
||||
"build": "tsc -b && vite build",
|
||||
"lint": "eslint .",
|
||||
"preview": "vite preview"
|
||||
},
|
||||
"dependencies": {
|
||||
"@tanstack/react-query": "^5.60.0",
|
||||
"axios": "^1.7.0",
|
||||
"lucide-react": "^0.460.0",
|
||||
"react": "^19.0.0",
|
||||
"react-dom": "^19.0.0",
|
||||
"react-router-dom": "^7.0.0"
|
||||
"@tailwindcss/vite": "^4.2.4",
|
||||
"@tanstack/react-query": "^5.100.9",
|
||||
"axios": "^1.16.0",
|
||||
"react": "^19.2.5",
|
||||
"react-dom": "^19.2.5",
|
||||
"react-router-dom": "^7.15.0",
|
||||
"tailwindcss": "^4.2.4",
|
||||
"zustand": "^5.0.13"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@tailwindcss/postcss": "^4.2.4",
|
||||
"@tailwindcss/vite": "^4.2.4",
|
||||
"@types/react": "^19.0.0",
|
||||
"@types/react-dom": "^19.0.0",
|
||||
"@vitejs/plugin-react": "^4.3.0",
|
||||
"autoprefixer": "^10.4.20",
|
||||
"eslint": "^9.15.0",
|
||||
"eslint-plugin-react-hooks": "^5.0.0",
|
||||
"eslint-plugin-react-refresh": "^0.4.14",
|
||||
"postcss": "^8.4.49",
|
||||
"tailwindcss": "^4.0.0",
|
||||
"typescript": "~5.6.0",
|
||||
"vite": "^6.0.0"
|
||||
"@eslint/js": "^10.0.1",
|
||||
"@types/react": "^19.2.14",
|
||||
"@types/react-dom": "^19.2.3",
|
||||
"@types/react-router-dom": "^5.3.3",
|
||||
"@vitejs/plugin-react": "^6.0.1",
|
||||
"eslint": "^10.2.1",
|
||||
"eslint-plugin-react-hooks": "^7.1.1",
|
||||
"eslint-plugin-react-refresh": "^0.5.2",
|
||||
"globals": "^17.5.0",
|
||||
"typescript": "~6.0.2",
|
||||
"typescript-eslint": "^8.58.2",
|
||||
"vite": "^8.0.10"
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,5 +0,0 @@
|
||||
export default {
|
||||
plugins: {
|
||||
'@tailwindcss/postcss': {},
|
||||
},
|
||||
}
|
||||
BIN
frontend/public/favicon.ico
Normal file
BIN
frontend/public/favicon.ico
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 15 KiB |
@@ -1,27 +1,16 @@
|
||||
import { useState, useEffect } from 'react'
|
||||
import { Routes, Route } from 'react-router-dom'
|
||||
import ErrorBoundary from './components/ErrorBoundary'
|
||||
import HomePage from './pages/HomePage'
|
||||
|
||||
function App() {
|
||||
const [health, setHealth] = useState<any>(null)
|
||||
|
||||
useEffect(() => {
|
||||
fetch('/api/health')
|
||||
.then(r => r.json())
|
||||
.then(setHealth)
|
||||
.catch(console.error)
|
||||
}, [])
|
||||
|
||||
return (
|
||||
<div className="min-h-screen flex items-center justify-center">
|
||||
<div className="p-6 rounded-lg bg-slate-800 shadow-xl max-w-md w-full">
|
||||
<h1 className="text-2xl font-bold mb-4 text-emerald-400">Extrudex</h1>
|
||||
<p className="text-slate-300 mb-4">React frontend scaffold</p>
|
||||
{health && (
|
||||
<pre className="text-xs bg-slate-900 p-3 rounded overflow-auto">
|
||||
{JSON.stringify(health, null, 2)}
|
||||
</pre>
|
||||
)}
|
||||
<ErrorBoundary>
|
||||
<div className="min-h-screen bg-slate-900 text-slate-100">
|
||||
<Routes>
|
||||
<Route path="/" element={<HomePage />} />
|
||||
</Routes>
|
||||
</div>
|
||||
</div>
|
||||
</ErrorBoundary>
|
||||
)
|
||||
}
|
||||
|
||||
|
||||
50
frontend/src/components/ErrorBoundary.tsx
Normal file
50
frontend/src/components/ErrorBoundary.tsx
Normal file
@@ -0,0 +1,50 @@
|
||||
import { Component, type ReactNode } from 'react'
|
||||
|
||||
interface Props {
|
||||
children: ReactNode
|
||||
}
|
||||
|
||||
interface State {
|
||||
hasError: boolean
|
||||
error?: Error
|
||||
}
|
||||
|
||||
class ErrorBoundary extends Component<Props, State> {
|
||||
constructor(props: Props) {
|
||||
super(props)
|
||||
this.state = { hasError: false }
|
||||
}
|
||||
|
||||
static getDerivedStateFromError(error: Error): State {
|
||||
return { hasError: true, error }
|
||||
}
|
||||
|
||||
componentDidCatch(error: Error, info: React.ErrorInfo) {
|
||||
// eslint-disable-next-line no-console
|
||||
console.error('ErrorBoundary caught:', error, info)
|
||||
}
|
||||
|
||||
render() {
|
||||
if (this.state.hasError) {
|
||||
return (
|
||||
<div className="flex min-h-screen items-center justify-center p-4">
|
||||
<div className="rounded-xl border border-red-500/30 bg-red-950/40 p-6 text-center shadow-lg backdrop-blur-sm">
|
||||
<h2 className="mb-2 text-xl font-semibold text-red-400">Something went wrong</h2>
|
||||
<p className="mb-4 text-sm text-red-300">
|
||||
{this.state.error?.message || 'An unexpected error occurred.'}
|
||||
</p>
|
||||
<button
|
||||
onClick={() => window.location.reload()}
|
||||
className="rounded-lg bg-red-600 px-4 py-2 text-sm font-medium text-white hover:bg-red-700"
|
||||
>
|
||||
Reload Page
|
||||
</button>
|
||||
</div>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
return this.props.children
|
||||
}
|
||||
}
|
||||
|
||||
export default ErrorBoundary
|
||||
21
frontend/src/components/ErrorState.tsx
Normal file
21
frontend/src/components/ErrorState.tsx
Normal file
@@ -0,0 +1,21 @@
|
||||
export default function ErrorState({
|
||||
message = 'Something went wrong.',
|
||||
onRetry,
|
||||
}: {
|
||||
message?: string
|
||||
onRetry?: () => void
|
||||
}) {
|
||||
return (
|
||||
<div className="flex min-h-[120px] flex-col items-center justify-center gap-3 rounded-xl border border-red-500/20 bg-red-950/30 p-6 text-center">
|
||||
<p className="text-sm text-red-300">{message}</p>
|
||||
{onRetry && (
|
||||
<button
|
||||
onClick={onRetry}
|
||||
className="rounded-lg bg-red-600 px-3 py-1.5 text-xs font-medium text-white hover:bg-red-700"
|
||||
>
|
||||
Retry
|
||||
</button>
|
||||
)}
|
||||
</div>
|
||||
)
|
||||
}
|
||||
14
frontend/src/components/LoadingSpinner.tsx
Normal file
14
frontend/src/components/LoadingSpinner.tsx
Normal file
@@ -0,0 +1,14 @@
|
||||
export default function LoadingSpinner({ size = 'md' }: { size?: 'sm' | 'md' | 'lg' }) {
|
||||
const sizeClass =
|
||||
size === 'sm' ? 'h-4 w-4 border-2' : size === 'lg' ? 'h-10 w-10 border-4' : 'h-6 w-6 border-2'
|
||||
|
||||
return (
|
||||
<div className="flex items-center justify-center p-4">
|
||||
<div
|
||||
className={`${sizeClass} animate-spin rounded-full border-slate-600 border-t-sky-400`}
|
||||
role="status"
|
||||
aria-label="Loading"
|
||||
/>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
11
frontend/src/hooks/useHealth.ts
Normal file
11
frontend/src/hooks/useHealth.ts
Normal file
@@ -0,0 +1,11 @@
|
||||
import { useQuery } from '@tanstack/react-query'
|
||||
import { healthCheck } from '../services/api'
|
||||
|
||||
export function useHealth() {
|
||||
return useQuery({
|
||||
queryKey: ['health'],
|
||||
queryFn: healthCheck,
|
||||
retry: 2,
|
||||
refetchInterval: 30000,
|
||||
})
|
||||
}
|
||||
@@ -2,7 +2,9 @@
|
||||
|
||||
body {
|
||||
margin: 0;
|
||||
min-width: 320px;
|
||||
min-height: 100vh;
|
||||
background-color: #0f172a; /* slate-900 */
|
||||
color: #f8fafc; /* slate-50 */
|
||||
background-color: #0f172a;
|
||||
color: #e2e8f0;
|
||||
font-family: ui-sans-serif, system-ui, -apple-system, BlinkMacSystemFont, 'Segoe UI', Roboto, 'Helvetica Neue', Arial, sans-serif;
|
||||
}
|
||||
|
||||
@@ -1,10 +1,18 @@
|
||||
import { StrictMode } from 'react'
|
||||
import { createRoot } from 'react-dom/client'
|
||||
import { QueryClient, QueryClientProvider } from '@tanstack/react-query'
|
||||
import { BrowserRouter } from 'react-router-dom'
|
||||
import './index.css'
|
||||
import App from './App'
|
||||
import App from './App.tsx'
|
||||
|
||||
const queryClient = new QueryClient()
|
||||
|
||||
createRoot(document.getElementById('root')!).render(
|
||||
<StrictMode>
|
||||
<App />
|
||||
<QueryClientProvider client={queryClient}>
|
||||
<BrowserRouter>
|
||||
<App />
|
||||
</BrowserRouter>
|
||||
</QueryClientProvider>
|
||||
</StrictMode>,
|
||||
)
|
||||
|
||||
36
frontend/src/pages/HomePage.tsx
Normal file
36
frontend/src/pages/HomePage.tsx
Normal file
@@ -0,0 +1,36 @@
|
||||
import LoadingSpinner from '../components/LoadingSpinner'
|
||||
import ErrorState from '../components/ErrorState'
|
||||
import { useHealth } from '../hooks/useHealth'
|
||||
|
||||
export default function HomePage() {
|
||||
const { data, isLoading, isError, refetch } = useHealth()
|
||||
|
||||
return (
|
||||
<div className="flex min-h-screen flex-col items-center justify-center gap-6 p-6">
|
||||
<h1 className="text-3xl font-bold tracking-tight text-sky-400">Extrudex</h1>
|
||||
<p className="text-slate-400">Filament inventory & print tracking</p>
|
||||
|
||||
<div className="w-full max-w-md rounded-xl border border-slate-700 bg-slate-800/60 p-6 shadow-lg backdrop-blur-sm">
|
||||
<h2 className="mb-3 text-sm font-semibold uppercase tracking-wider text-slate-400">
|
||||
Backend Health
|
||||
</h2>
|
||||
|
||||
{isLoading && <LoadingSpinner />}
|
||||
|
||||
{isError && (
|
||||
<ErrorState
|
||||
message="Backend is unreachable."
|
||||
onRetry={() => refetch()}
|
||||
/>
|
||||
)}
|
||||
|
||||
{data && (
|
||||
<div className="flex items-center gap-2 text-emerald-400">
|
||||
<span className="h-2 w-2 rounded-full bg-emerald-400" />
|
||||
<span className="text-sm font-medium">{data.status || 'ok'}</span>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
25
frontend/src/services/api.ts
Normal file
25
frontend/src/services/api.ts
Normal file
@@ -0,0 +1,25 @@
|
||||
import axios from 'axios'
|
||||
|
||||
const API_BASE_URL = import.meta.env.VITE_API_BASE_URL || 'http://localhost:8080'
|
||||
|
||||
export const api = axios.create({
|
||||
baseURL: API_BASE_URL,
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
timeout: 10000,
|
||||
})
|
||||
|
||||
api.interceptors.response.use(
|
||||
(response) => response,
|
||||
(error) => {
|
||||
// eslint-disable-next-line no-console
|
||||
console.error('API error:', error)
|
||||
return Promise.reject(error)
|
||||
}
|
||||
)
|
||||
|
||||
export async function healthCheck(): Promise<{ status: string }> {
|
||||
const { data } = await api.get('/health')
|
||||
return data
|
||||
}
|
||||
6
frontend/src/types/index.ts
Normal file
6
frontend/src/types/index.ts
Normal file
@@ -0,0 +1,6 @@
|
||||
// Shared TypeScript types for Extrudex frontend
|
||||
// Placeholder — expand as API contracts stabilize
|
||||
|
||||
export interface HealthResponse {
|
||||
status: string
|
||||
}
|
||||
8
frontend/src/vite-env.d.ts
vendored
8
frontend/src/vite-env.d.ts
vendored
@@ -1 +1,9 @@
|
||||
/// <reference types="vite/client" />
|
||||
|
||||
interface ImportMetaEnv {
|
||||
readonly VITE_API_BASE_URL: string
|
||||
}
|
||||
|
||||
interface ImportMeta {
|
||||
readonly env: ImportMetaEnv
|
||||
}
|
||||
|
||||
@@ -5,7 +5,13 @@ export default {
|
||||
"./src/**/*.{js,ts,jsx,tsx}",
|
||||
],
|
||||
theme: {
|
||||
extend: {},
|
||||
extend: {
|
||||
colors: {
|
||||
slate: {
|
||||
850: '#1e293b',
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
plugins: [],
|
||||
}
|
||||
|
||||
@@ -1,24 +1,38 @@
|
||||
{
|
||||
"compilerOptions": {
|
||||
"target": "ES2020",
|
||||
"useDefineForClassFields": true,
|
||||
"lib": ["ES2020", "DOM", "DOM.Iterable"],
|
||||
"tsBuildInfoFile": "./node_modules/.tmp/tsconfig.app.tsbuildinfo",
|
||||
"target": "ES2023",
|
||||
"lib": ["ES2023", "DOM", "DOM.Iterable"],
|
||||
"module": "ESNext",
|
||||
"types": ["vite/client"],
|
||||
"skipLibCheck": true,
|
||||
|
||||
/* Bundler mode */
|
||||
"moduleResolution": "bundler",
|
||||
"allowImportingTsExtensions": true,
|
||||
"isolatedModules": true,
|
||||
"verbatimModuleSyntax": true,
|
||||
"moduleDetection": "force",
|
||||
"noEmit": true,
|
||||
"jsx": "react-jsx",
|
||||
"strict": true,
|
||||
|
||||
/* Linting */
|
||||
"noUnusedLocals": true,
|
||||
"noUnusedParameters": true,
|
||||
"erasableSyntaxOnly": true,
|
||||
"noFallthroughCasesInSwitch": true,
|
||||
"baseUrl": ".",
|
||||
"paths": {
|
||||
"@/*": ["src/*"]
|
||||
}
|
||||
|
||||
/* Strict mode */
|
||||
"strict": true,
|
||||
"noImplicitAny": true,
|
||||
"strictNullChecks": true,
|
||||
"strictFunctionTypes": true,
|
||||
"strictBindCallApply": true,
|
||||
"strictPropertyInitialization": true,
|
||||
"noImplicitThis": true,
|
||||
"alwaysStrict": true,
|
||||
"noUncheckedIndexedAccess": true,
|
||||
"exactOptionalPropertyTypes": true,
|
||||
"noImplicitReturns": true
|
||||
},
|
||||
"include": ["src"]
|
||||
}
|
||||
|
||||
@@ -1,24 +1,7 @@
|
||||
{
|
||||
"compilerOptions": {
|
||||
"target": "ES2020",
|
||||
"useDefineForClassFields": true,
|
||||
"lib": ["ES2020", "DOM", "DOM.Iterable"],
|
||||
"module": "ESNext",
|
||||
"skipLibCheck": true,
|
||||
"moduleResolution": "bundler",
|
||||
"allowImportingTsExtensions": true,
|
||||
"isolatedModules": true,
|
||||
"moduleDetection": "force",
|
||||
"noEmit": true,
|
||||
"jsx": "react-jsx",
|
||||
"strict": true,
|
||||
"noUnusedLocals": true,
|
||||
"noUnusedParameters": true,
|
||||
"noFallthroughCasesInSwitch": true,
|
||||
"baseUrl": ".",
|
||||
"paths": {
|
||||
"@/*": ["src/*"]
|
||||
}
|
||||
},
|
||||
"include": ["src"]
|
||||
"files": [],
|
||||
"references": [
|
||||
{ "path": "./tsconfig.app.json" },
|
||||
{ "path": "./tsconfig.node.json" }
|
||||
]
|
||||
}
|
||||
|
||||
@@ -1,11 +1,24 @@
|
||||
{
|
||||
"compilerOptions": {
|
||||
"composite": true,
|
||||
"tsBuildInfoFile": "./node_modules/.tmp/tsconfig.node.tsbuildinfo",
|
||||
"target": "es2023",
|
||||
"lib": ["ES2023"],
|
||||
"module": "esnext",
|
||||
"types": ["node"],
|
||||
"skipLibCheck": true,
|
||||
"module": "ESNext",
|
||||
|
||||
/* Bundler mode */
|
||||
"moduleResolution": "bundler",
|
||||
"allowSyntheticDefaultImports": true,
|
||||
"strict": true
|
||||
"allowImportingTsExtensions": true,
|
||||
"verbatimModuleSyntax": true,
|
||||
"moduleDetection": "force",
|
||||
"noEmit": true,
|
||||
|
||||
/* Linting */
|
||||
"noUnusedLocals": true,
|
||||
"noUnusedParameters": true,
|
||||
"erasableSyntaxOnly": true,
|
||||
"noFallthroughCasesInSwitch": true
|
||||
},
|
||||
"include": ["vite.config.ts"]
|
||||
}
|
||||
|
||||
@@ -2,18 +2,15 @@ import { defineConfig } from 'vite'
|
||||
import react from '@vitejs/plugin-react'
|
||||
import tailwindcss from '@tailwindcss/vite'
|
||||
|
||||
// https://vite.dev/config/
|
||||
export default defineConfig({
|
||||
plugins: [react(), tailwindcss()],
|
||||
server: {
|
||||
port: 5173,
|
||||
proxy: {
|
||||
'/api': {
|
||||
target: 'http://localhost:8080',
|
||||
changeOrigin: true,
|
||||
}
|
||||
}
|
||||
host: true,
|
||||
},
|
||||
build: {
|
||||
outDir: 'dist',
|
||||
}
|
||||
sourcemap: true,
|
||||
},
|
||||
})
|
||||
|
||||
Reference in New Issue
Block a user