Compare commits
13 Commits
agent/rex/
...
f1614029b5
| Author | SHA1 | Date | |
|---|---|---|---|
| f1614029b5 | |||
| 1109d1dd2f | |||
| fd26b205bf | |||
| 41f66005a6 | |||
| 62d74beba4 | |||
| fca2ef5b84 | |||
| 3ac8432360 | |||
| f15597966f | |||
| a54fcdd371 | |||
| 1b86d617cd | |||
|
|
fd39fff433 | ||
| 2243859286 | |||
|
|
3fe0850711 |
@@ -1,37 +1,25 @@
|
|||||||
# ── Stage 1: Build ──────────────────────────────────────────
|
# Build stage
|
||||||
FROM mcr.microsoft.com/dotnet/sdk:9.0 AS build
|
FROM golang:1.24-alpine AS builder
|
||||||
WORKDIR /src
|
|
||||||
|
|
||||||
# Copy csproj first for layer caching — restores before copying source
|
|
||||||
COPY Extrudex.csproj .
|
|
||||||
RUN dotnet restore
|
|
||||||
|
|
||||||
# Copy the rest of the source
|
|
||||||
COPY . .
|
|
||||||
RUN dotnet publish Extrudex.csproj \
|
|
||||||
-c Release \
|
|
||||||
-o /app/publish \
|
|
||||||
--no-restore
|
|
||||||
|
|
||||||
# ── Stage 2: Runtime ────────────────────────────────────────
|
|
||||||
FROM mcr.microsoft.com/dotnet/aspnet:9.0 AS runtime
|
|
||||||
WORKDIR /app
|
WORKDIR /app
|
||||||
|
|
||||||
# Install curl for health check (not included in aspnet base image)
|
# Copy go mod files first for caching
|
||||||
RUN apt-get update && apt-get install -y --no-install-recommends curl && rm -rf /var/lib/apt/lists/*
|
COPY go.mod go.sum ./
|
||||||
|
RUN go mod download
|
||||||
|
|
||||||
# Non-root user for security
|
# Copy source and build
|
||||||
RUN adduser --disabled-password --gecos "" appuser
|
COPY . .
|
||||||
USER appuser
|
RUN CGO_ENABLED=0 GOOS=linux go build -o server ./cmd/server
|
||||||
|
|
||||||
# Copy published output from build stage
|
# Final stage
|
||||||
COPY --from=build /app/publish .
|
FROM alpine:latest
|
||||||
|
RUN apk --no-cache add ca-certificates
|
||||||
|
|
||||||
|
WORKDIR /root/
|
||||||
|
|
||||||
|
# Copy binary from builder
|
||||||
|
COPY --from=builder /app/server .
|
||||||
|
|
||||||
# ASP.NET Core listens on 8080 by default in .NET 8+
|
|
||||||
EXPOSE 8080
|
EXPOSE 8080
|
||||||
|
|
||||||
# Health check against /health endpoint
|
CMD ["./server"]
|
||||||
HEALTHCHECK --interval=30s --timeout=5s --start-period=10s --retries=3 \
|
|
||||||
CMD curl --fail http://localhost:8080/health || exit 1
|
|
||||||
|
|
||||||
ENTRYPOINT ["dotnet", "Extrudex.dll"]
|
|
||||||
|
|||||||
90
backend/cmd/server/main.go
Normal file
90
backend/cmd/server/main.go
Normal file
@@ -0,0 +1,90 @@
|
|||||||
|
package main
|
||||||
|
|
||||||
|
import (
	"context"
	"errors"
	"log/slog"
	"net/http"
	"os"
	"os/signal"
	"syscall"
	"time"

	"github.com/CubeCraft-Creations/Extrudex/backend/internal/config"
	"github.com/CubeCraft-Creations/Extrudex/backend/internal/db"
	"github.com/CubeCraft-Creations/Extrudex/backend/internal/router"
	"github.com/CubeCraft-Creations/Extrudex/backend/internal/sse"
)
|
||||||
|
|
||||||
|
func main() {
|
||||||
|
// Setup structured logging
|
||||||
|
slog.SetDefault(slog.New(slog.NewTextHandler(os.Stdout, &slog.HandlerOptions{
|
||||||
|
Level: slog.LevelInfo,
|
||||||
|
})))
|
||||||
|
|
||||||
|
// Load configuration
|
||||||
|
cfg, err := config.Load()
|
||||||
|
if err != nil {
|
||||||
|
slog.Error("failed to load config", "error", err)
|
||||||
|
os.Exit(1)
|
||||||
|
}
|
||||||
|
|
||||||
|
slog.Info("config loaded", "port", cfg.Port, "cors_origin", cfg.CorsOrigin)
|
||||||
|
|
||||||
|
// Connect to database
|
||||||
|
dbPool, err := db.NewPool(cfg.DatabaseURL)
|
||||||
|
if err != nil {
|
||||||
|
slog.Error("failed to connect to database", "error", err)
|
||||||
|
os.Exit(1)
|
||||||
|
}
|
||||||
|
defer db.ClosePool(dbPool)
|
||||||
|
|
||||||
|
slog.Info("database connected")
|
||||||
|
|
||||||
|
// Create SSE broadcaster and start it
|
||||||
|
sseBC := sse.NewBroadcaster(128)
|
||||||
|
sseBC.Start()
|
||||||
|
defer sseBC.Stop()
|
||||||
|
|
||||||
|
slog.Info("sse broadcaster started")
|
||||||
|
|
||||||
|
// Create router
|
||||||
|
r := router.New(cfg, dbPool, sseBC)
|
||||||
|
|
||||||
|
// Create HTTP server
|
||||||
|
// WriteTimeout is 0 for SSE support — the Chi middleware.Timeout(60s)
|
||||||
|
// handles request-level timeouts on non-SSE routes.
|
||||||
|
server := &http.Server{
|
||||||
|
Addr: ":" + cfg.Port,
|
||||||
|
Handler: r,
|
||||||
|
ReadTimeout: 15 * time.Second,
|
||||||
|
WriteTimeout: 0, // disabled for SSE long-lived connections
|
||||||
|
IdleTimeout: 60 * time.Second,
|
||||||
|
}
|
||||||
|
|
||||||
|
// Start server in goroutine
|
||||||
|
go func() {
|
||||||
|
slog.Info("server starting", "addr", server.Addr)
|
||||||
|
if err := server.ListenAndServe(); err != nil && err != http.ErrServerClosed {
|
||||||
|
slog.Error("server error", "error", err)
|
||||||
|
os.Exit(1)
|
||||||
|
}
|
||||||
|
}()
|
||||||
|
|
||||||
|
// Wait for shutdown signal
|
||||||
|
quit := make(chan os.Signal, 1)
|
||||||
|
signal.Notify(quit, syscall.SIGINT, syscall.SIGTERM)
|
||||||
|
<-quit
|
||||||
|
|
||||||
|
slog.Info("server shutting down")
|
||||||
|
|
||||||
|
// Graceful shutdown
|
||||||
|
ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second)
|
||||||
|
defer cancel()
|
||||||
|
|
||||||
|
if err := server.Shutdown(ctx); err != nil {
|
||||||
|
slog.Error("server shutdown error", "error", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
db.ClosePool(dbPool)
|
||||||
|
slog.Info("server stopped")
|
||||||
|
}
|
||||||
18
backend/go.mod
Normal file
18
backend/go.mod
Normal file
@@ -0,0 +1,18 @@
|
|||||||
|
module github.com/CubeCraft-Creations/Extrudex/backend
|
||||||
|
|
||||||
|
go 1.24
|
||||||
|
|
||||||
|
require (
|
||||||
|
github.com/go-chi/chi/v5 v5.2.0
|
||||||
|
github.com/jackc/pgx/v5 v5.7.4
|
||||||
|
github.com/kelseyhightower/envconfig v1.4.0
|
||||||
|
)
|
||||||
|
|
||||||
|
require (
|
||||||
|
github.com/jackc/pgpassfile v1.0.0 // indirect
|
||||||
|
github.com/jackc/pgservicefile v0.0.0-20240606120523-5a60cdf6a761 // indirect
|
||||||
|
github.com/jackc/puddle/v2 v2.2.2 // indirect
|
||||||
|
golang.org/x/crypto v0.31.0 // indirect
|
||||||
|
golang.org/x/sync v0.10.0 // indirect
|
||||||
|
golang.org/x/text v0.21.0 // indirect
|
||||||
|
)
|
||||||
32
backend/go.sum
Normal file
32
backend/go.sum
Normal file
@@ -0,0 +1,32 @@
|
|||||||
|
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||||
|
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
|
||||||
|
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||||
|
github.com/go-chi/chi/v5 v5.2.0 h1:Aj1EtB0qR2Rdo2dG4O94RIU35w2lvQSj6BRA4+qwFL0=
|
||||||
|
github.com/go-chi/chi/v5 v5.2.0/go.mod h1:DslCQbL2OYiznFReuXYUmQ2hGd1aDpCnlMNITLSKoi8=
|
||||||
|
github.com/jackc/pgpassfile v1.0.0 h1:/6Hmqy13Ss2zCq62VdNG8tM1wchn8zjSGOBJ6icpsIM=
|
||||||
|
github.com/jackc/pgpassfile v1.0.0/go.mod h1:CEx0iS5ambNFdcRtxPj5JhEz+xB6uRky5eyVu/W2HEg=
|
||||||
|
github.com/jackc/pgservicefile v0.0.0-20240606120523-5a60cdf6a761 h1:iCEnooe7UlwOQYpKFhBabPMi4aNAfoODPEFNiAnClxo=
|
||||||
|
github.com/jackc/pgservicefile v0.0.0-20240606120523-5a60cdf6a761/go.mod h1:5TJZWKEWniPve33vlWYSoGYefn3gLQRzjfDlhSJ9ZKM=
|
||||||
|
github.com/jackc/pgx/v5 v5.7.4 h1:9wKznZrhWa2QiHL+NjTSPP6yjl3451BX3imWDnokYlg=
|
||||||
|
github.com/jackc/pgx/v5 v5.7.4/go.mod h1:ncY89UGWxg82EykZUwSpUKEfccBGGYq1xjrOpsbsfGQ=
|
||||||
|
github.com/jackc/puddle/v2 v2.2.2 h1:PR8nw+E/1w0GLuRFSmiioY6UooMp6KJv0/61nB7icHo=
|
||||||
|
github.com/jackc/puddle/v2 v2.2.2/go.mod h1:vriiEXHvEE654aYKXXjOvZM39qJ0q+azkZFrfEOc3H4=
|
||||||
|
github.com/kelseyhightower/envconfig v1.4.0 h1:Im6hONhd3pLkfDFsbRgu68RDNkGF1r3dvMUtDTo2cv8=
|
||||||
|
github.com/kelseyhightower/envconfig v1.4.0/go.mod h1:cccZRl6mQpaq41TPp5QxidR+Sa3axMbJDNb//FQX6Gg=
|
||||||
|
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
|
||||||
|
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
|
||||||
|
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
|
||||||
|
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
|
||||||
|
github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
|
||||||
|
github.com/stretchr/testify v1.8.1 h1:w7B6lhMri9wdJUVmEZPGGhZzrYTPvgJArz7wNPgYKsk=
|
||||||
|
github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4=
|
||||||
|
golang.org/x/crypto v0.31.0 h1:ihbySMvVjLAeSH1IbfcRTkD/iNscyz8rGzjF/E5hV6U=
|
||||||
|
golang.org/x/crypto v0.31.0/go.mod h1:kDsLvtWBEx7MV9tJOj9bnXsPbxwJQ6csT/x4KIN4Ssk=
|
||||||
|
golang.org/x/sync v0.10.0 h1:3NQrjDixjgGwUOCaF8w2+VYHv0Ve/vGYSbdkTa98gmQ=
|
||||||
|
golang.org/x/sync v0.10.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
|
||||||
|
golang.org/x/text v0.21.0 h1:zyQAAkrwaneQ066sspRyJaG9VNi/YJ1NfzcGB3hZ/qo=
|
||||||
|
golang.org/x/text v0.21.0/go.mod h1:4IBbMaMmOPCJ8SecivzSH54+73PCFmPWxNTLm+vZkEQ=
|
||||||
|
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||||
|
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
|
||||||
|
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
|
||||||
|
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
|
||||||
24
backend/internal/config/config.go
Normal file
24
backend/internal/config/config.go
Normal file
@@ -0,0 +1,24 @@
|
|||||||
|
package config
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
|
||||||
|
"github.com/kelseyhightower/envconfig"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Config holds all application configuration loaded from environment variables.
|
||||||
|
type Config struct {
|
||||||
|
DatabaseURL string `envconfig:"database_url" required:"true"`
|
||||||
|
Port string `envconfig:"port" default:"8080"`
|
||||||
|
CorsOrigin string `envconfig:"cors_origin" default:"*"`
|
||||||
|
LogLevel string `envconfig:"log_level" default:"info"`
|
||||||
|
}
|
||||||
|
|
||||||
|
// Load reads configuration from environment variables and returns a populated Config.
|
||||||
|
func Load() (*Config, error) {
|
||||||
|
var cfg Config
|
||||||
|
if err := envconfig.Process("", &cfg); err != nil {
|
||||||
|
return nil, fmt.Errorf("failed to load config: %w", err)
|
||||||
|
}
|
||||||
|
return &cfg, nil
|
||||||
|
}
|
||||||
34
backend/internal/db/db.go
Normal file
34
backend/internal/db/db.go
Normal file
@@ -0,0 +1,34 @@
|
|||||||
|
package db
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"fmt"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"github.com/jackc/pgx/v5/pgxpool"
|
||||||
|
)
|
||||||
|
|
||||||
|
// NewPool creates a new pgx connection pool and verifies connectivity with a ping.
|
||||||
|
func NewPool(databaseURL string) (*pgxpool.Pool, error) {
|
||||||
|
ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
|
||||||
|
defer cancel()
|
||||||
|
|
||||||
|
pool, err := pgxpool.New(ctx, databaseURL)
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("failed to create db pool: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := pool.Ping(ctx); err != nil {
|
||||||
|
pool.Close()
|
||||||
|
return nil, fmt.Errorf("failed to ping db: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
return pool, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// ClosePool gracefully closes the connection pool.
|
||||||
|
func ClosePool(pool *pgxpool.Pool) {
|
||||||
|
if pool != nil {
|
||||||
|
pool.Close()
|
||||||
|
}
|
||||||
|
}
|
||||||
67
backend/internal/dtos/dtos.go
Normal file
67
backend/internal/dtos/dtos.go
Normal file
@@ -0,0 +1,67 @@
|
|||||||
|
// Package dtos defines request/response data transfer objects for the Extrudex API.
|
||||||
|
// DTOs keep HTTP serialization concerns separate from domain models.
|
||||||
|
package dtos
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Common Response Wrappers
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
// ListResponse wraps a paginated collection response.
|
||||||
|
type ListResponse struct {
|
||||||
|
Data any `json:"data"`
|
||||||
|
Total int `json:"total"`
|
||||||
|
Limit int `json:"limit"`
|
||||||
|
Offset int `json:"offset"`
|
||||||
|
}
|
||||||
|
|
||||||
|
// SingleResponse wraps a single-item response.
|
||||||
|
type SingleResponse struct {
|
||||||
|
Data any `json:"data"`
|
||||||
|
}
|
||||||
|
|
||||||
|
// ErrorResponse is the standard error payload for all API errors.
|
||||||
|
type ErrorResponse struct {
|
||||||
|
Error string `json:"error"`
|
||||||
|
Code int `json:"code"`
|
||||||
|
}
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// Filament DTOs
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
// CreateFilamentRequest is the POST body for creating a new filament spool.
|
||||||
|
type CreateFilamentRequest struct {
|
||||||
|
Name string `json:"name"`
|
||||||
|
MaterialBaseID int `json:"material_base_id"`
|
||||||
|
MaterialFinishID int `json:"material_finish_id"`
|
||||||
|
MaterialModifierID *int `json:"material_modifier_id,omitempty"`
|
||||||
|
ColorHex string `json:"color_hex"`
|
||||||
|
Brand *string `json:"brand,omitempty"`
|
||||||
|
DiameterMM *float64 `json:"diameter_mm,omitempty"` // defaults to 1.75
|
||||||
|
InitialGrams int `json:"initial_grams"`
|
||||||
|
RemainingGrams int `json:"remaining_grams"`
|
||||||
|
SpoolWeightGrams *int `json:"spool_weight_grams,omitempty"`
|
||||||
|
CostUSD *float64 `json:"cost_usd,omitempty"`
|
||||||
|
LowStockThresholdGrams *int `json:"low_stock_threshold_grams,omitempty"` // defaults to 50
|
||||||
|
Notes *string `json:"notes,omitempty"`
|
||||||
|
Barcode *string `json:"barcode,omitempty"`
|
||||||
|
}
|
||||||
|
|
||||||
|
// UpdateFilamentRequest is the PUT body for partially updating a filament spool.
|
||||||
|
// All fields are optional — only non-nil fields are applied.
|
||||||
|
type UpdateFilamentRequest struct {
|
||||||
|
Name *string `json:"name,omitempty"`
|
||||||
|
MaterialBaseID *int `json:"material_base_id,omitempty"`
|
||||||
|
MaterialFinishID *int `json:"material_finish_id,omitempty"`
|
||||||
|
MaterialModifierID *int `json:"material_modifier_id,omitempty"`
|
||||||
|
ColorHex *string `json:"color_hex,omitempty"`
|
||||||
|
Brand *string `json:"brand,omitempty"`
|
||||||
|
DiameterMM *float64 `json:"diameter_mm,omitempty"`
|
||||||
|
InitialGrams *int `json:"initial_grams,omitempty"`
|
||||||
|
RemainingGrams *int `json:"remaining_grams,omitempty"`
|
||||||
|
SpoolWeightGrams *int `json:"spool_weight_grams,omitempty"`
|
||||||
|
CostUSD *float64 `json:"cost_usd,omitempty"`
|
||||||
|
LowStockThresholdGrams *int `json:"low_stock_threshold_grams,omitempty"`
|
||||||
|
Notes *string `json:"notes,omitempty"`
|
||||||
|
Barcode *string `json:"barcode,omitempty"`
|
||||||
|
}
|
||||||
273
backend/internal/handlers/filament_handler.go
Normal file
273
backend/internal/handlers/filament_handler.go
Normal file
@@ -0,0 +1,273 @@
|
|||||||
|
package handlers
|
||||||
|
|
||||||
|
import (
|
||||||
|
"encoding/json"
|
||||||
|
"log/slog"
|
||||||
|
"net/http"
|
||||||
|
"strconv"
|
||||||
|
|
||||||
|
"github.com/CubeCraft-Creations/Extrudex/backend/internal/dtos"
|
||||||
|
"github.com/CubeCraft-Creations/Extrudex/backend/internal/models"
|
||||||
|
"github.com/CubeCraft-Creations/Extrudex/backend/internal/repositories"
|
||||||
|
"github.com/CubeCraft-Creations/Extrudex/backend/internal/services"
|
||||||
|
"github.com/go-chi/chi/v5"
|
||||||
|
)
|
||||||
|
|
||||||
|
// FilamentHandler handles HTTP requests for filament spool CRUD operations.
|
||||||
|
type FilamentHandler struct {
|
||||||
|
service *services.FilamentService
|
||||||
|
}
|
||||||
|
|
||||||
|
// NewFilamentHandler creates a FilamentHandler with the given service.
|
||||||
|
func NewFilamentHandler(service *services.FilamentService) *FilamentHandler {
|
||||||
|
return &FilamentHandler{service: service}
|
||||||
|
}
|
||||||
|
|
||||||
|
// List handles GET /api/filaments — returns paginated, filtered spools.
|
||||||
|
func (h *FilamentHandler) List(w http.ResponseWriter, r *http.Request) {
|
||||||
|
limit, offset := parsePagination(r)
|
||||||
|
filter := repositories.FilamentFilter{
|
||||||
|
Material: r.URL.Query().Get("material"),
|
||||||
|
Finish: r.URL.Query().Get("finish"),
|
||||||
|
Color: r.URL.Query().Get("color"),
|
||||||
|
LowStock: r.URL.Query().Get("low_stock") == "true",
|
||||||
|
Limit: limit,
|
||||||
|
Offset: offset,
|
||||||
|
}
|
||||||
|
|
||||||
|
spools, total, err := h.service.List(r.Context(), filter)
|
||||||
|
if err != nil {
|
||||||
|
slog.Error("failed to list filaments", "error", err)
|
||||||
|
writeJSON(w, http.StatusInternalServerError, dtos.ErrorResponse{
|
||||||
|
Error: "internal server error",
|
||||||
|
Code: http.StatusInternalServerError,
|
||||||
|
})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
writeJSON(w, http.StatusOK, dtos.ListResponse{
|
||||||
|
Data: spools,
|
||||||
|
Total: total,
|
||||||
|
Limit: limit,
|
||||||
|
Offset: offset,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get handles GET /api/filaments/{id} — returns a single spool.
|
||||||
|
func (h *FilamentHandler) Get(w http.ResponseWriter, r *http.Request) {
|
||||||
|
id, err := strconv.Atoi(chi.URLParam(r, "id"))
|
||||||
|
if err != nil {
|
||||||
|
writeJSON(w, http.StatusBadRequest, dtos.ErrorResponse{
|
||||||
|
Error: "invalid filament ID",
|
||||||
|
Code: http.StatusBadRequest,
|
||||||
|
})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
spool, err := h.service.GetByID(r.Context(), id)
|
||||||
|
if err != nil {
|
||||||
|
slog.Error("failed to get filament", "id", id, "error", err)
|
||||||
|
writeJSON(w, http.StatusInternalServerError, dtos.ErrorResponse{
|
||||||
|
Error: "internal server error",
|
||||||
|
Code: http.StatusInternalServerError,
|
||||||
|
})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
if spool == nil {
|
||||||
|
writeJSON(w, http.StatusNotFound, dtos.ErrorResponse{
|
||||||
|
Error: "filament not found",
|
||||||
|
Code: http.StatusNotFound,
|
||||||
|
})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
writeJSON(w, http.StatusOK, dtos.SingleResponse{Data: spool})
|
||||||
|
}
|
||||||
|
|
||||||
|
// Create handles POST /api/filaments — creates a new filament spool.
|
||||||
|
func (h *FilamentHandler) Create(w http.ResponseWriter, r *http.Request) {
|
||||||
|
var req dtos.CreateFilamentRequest
|
||||||
|
if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
|
||||||
|
writeJSON(w, http.StatusBadRequest, dtos.ErrorResponse{
|
||||||
|
Error: "invalid request body",
|
||||||
|
Code: http.StatusBadRequest,
|
||||||
|
})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Validate required fields.
|
||||||
|
if err := services.ValidateCreateFilamentRequest(req); err != nil {
|
||||||
|
writeJSON(w, http.StatusBadRequest, dtos.ErrorResponse{
|
||||||
|
Error: "validation failed: " + err.Error(),
|
||||||
|
Code: http.StatusBadRequest,
|
||||||
|
})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Build domain model.
|
||||||
|
spool := models.FilamentSpool{
|
||||||
|
Name: req.Name,
|
||||||
|
MaterialBaseID: req.MaterialBaseID,
|
||||||
|
MaterialFinishID: req.MaterialFinishID,
|
||||||
|
MaterialModifierID: req.MaterialModifierID,
|
||||||
|
ColorHex: req.ColorHex,
|
||||||
|
Brand: req.Brand,
|
||||||
|
DiameterMM: 1.75, // default
|
||||||
|
InitialGrams: req.InitialGrams,
|
||||||
|
RemainingGrams: req.RemainingGrams,
|
||||||
|
SpoolWeightGrams: req.SpoolWeightGrams,
|
||||||
|
CostUSD: req.CostUSD,
|
||||||
|
LowStockThresholdGrams: 50, // default
|
||||||
|
Notes: req.Notes,
|
||||||
|
Barcode: req.Barcode,
|
||||||
|
}
|
||||||
|
if req.DiameterMM != nil {
|
||||||
|
spool.DiameterMM = *req.DiameterMM
|
||||||
|
}
|
||||||
|
if req.LowStockThresholdGrams != nil {
|
||||||
|
spool.LowStockThresholdGrams = *req.LowStockThresholdGrams
|
||||||
|
}
|
||||||
|
|
||||||
|
created, err := h.service.Create(r.Context(), &spool)
|
||||||
|
if err != nil {
|
||||||
|
slog.Error("failed to create filament", "error", err)
|
||||||
|
writeJSON(w, http.StatusInternalServerError, dtos.ErrorResponse{
|
||||||
|
Error: "internal server error",
|
||||||
|
Code: http.StatusInternalServerError,
|
||||||
|
})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
writeJSON(w, http.StatusCreated, dtos.SingleResponse{Data: created})
|
||||||
|
}
|
||||||
|
|
||||||
|
// Update handles PUT /api/filaments/{id} — partially updates a spool.
|
||||||
|
func (h *FilamentHandler) Update(w http.ResponseWriter, r *http.Request) {
|
||||||
|
id, err := strconv.Atoi(chi.URLParam(r, "id"))
|
||||||
|
if err != nil {
|
||||||
|
writeJSON(w, http.StatusBadRequest, dtos.ErrorResponse{
|
||||||
|
Error: "invalid filament ID",
|
||||||
|
Code: http.StatusBadRequest,
|
||||||
|
})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
var req dtos.UpdateFilamentRequest
|
||||||
|
if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
|
||||||
|
writeJSON(w, http.StatusBadRequest, dtos.ErrorResponse{
|
||||||
|
Error: "invalid request body",
|
||||||
|
Code: http.StatusBadRequest,
|
||||||
|
})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Validate update fields.
|
||||||
|
if err := services.ValidateUpdateFilamentRequest(req); err != nil {
|
||||||
|
writeJSON(w, http.StatusBadRequest, dtos.ErrorResponse{
|
||||||
|
Error: "validation failed: " + err.Error(),
|
||||||
|
Code: http.StatusBadRequest,
|
||||||
|
})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Build updates map (only non-nil fields).
|
||||||
|
updates := buildFilamentUpdates(req)
|
||||||
|
|
||||||
|
updated, err := h.service.Update(r.Context(), id, updates)
|
||||||
|
if err != nil {
|
||||||
|
slog.Error("failed to update filament", "id", id, "error", err)
|
||||||
|
writeJSON(w, http.StatusInternalServerError, dtos.ErrorResponse{
|
||||||
|
Error: "internal server error",
|
||||||
|
Code: http.StatusInternalServerError,
|
||||||
|
})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
if updated == nil {
|
||||||
|
writeJSON(w, http.StatusNotFound, dtos.ErrorResponse{
|
||||||
|
Error: "filament not found",
|
||||||
|
Code: http.StatusNotFound,
|
||||||
|
})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
writeJSON(w, http.StatusOK, dtos.SingleResponse{Data: updated})
|
||||||
|
}
|
||||||
|
|
||||||
|
// Delete handles DELETE /api/filaments/{id} — soft-deletes a spool.
|
||||||
|
func (h *FilamentHandler) Delete(w http.ResponseWriter, r *http.Request) {
|
||||||
|
id, err := strconv.Atoi(chi.URLParam(r, "id"))
|
||||||
|
if err != nil {
|
||||||
|
writeJSON(w, http.StatusBadRequest, dtos.ErrorResponse{
|
||||||
|
Error: "invalid filament ID",
|
||||||
|
Code: http.StatusBadRequest,
|
||||||
|
})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
deleted, err := h.service.SoftDelete(r.Context(), id)
|
||||||
|
if err != nil {
|
||||||
|
slog.Error("failed to delete filament", "id", id, "error", err)
|
||||||
|
writeJSON(w, http.StatusInternalServerError, dtos.ErrorResponse{
|
||||||
|
Error: "internal server error",
|
||||||
|
Code: http.StatusInternalServerError,
|
||||||
|
})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
if !deleted {
|
||||||
|
writeJSON(w, http.StatusNotFound, dtos.ErrorResponse{
|
||||||
|
Error: "filament not found",
|
||||||
|
Code: http.StatusNotFound,
|
||||||
|
})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
w.WriteHeader(http.StatusNoContent)
|
||||||
|
}
|
||||||
|
|
||||||
|
// buildFilamentUpdates converts an UpdateFilamentRequest to a map of column→value.
|
||||||
|
func buildFilamentUpdates(req dtos.UpdateFilamentRequest) map[string]interface{} {
|
||||||
|
updates := make(map[string]interface{})
|
||||||
|
if req.Name != nil {
|
||||||
|
updates["name"] = *req.Name
|
||||||
|
}
|
||||||
|
if req.MaterialBaseID != nil {
|
||||||
|
updates["material_base_id"] = *req.MaterialBaseID
|
||||||
|
}
|
||||||
|
if req.MaterialFinishID != nil {
|
||||||
|
updates["material_finish_id"] = *req.MaterialFinishID
|
||||||
|
}
|
||||||
|
if req.MaterialModifierID != nil {
|
||||||
|
updates["material_modifier_id"] = *req.MaterialModifierID
|
||||||
|
}
|
||||||
|
if req.ColorHex != nil {
|
||||||
|
updates["color_hex"] = *req.ColorHex
|
||||||
|
}
|
||||||
|
if req.Brand != nil {
|
||||||
|
updates["brand"] = *req.Brand
|
||||||
|
}
|
||||||
|
if req.DiameterMM != nil {
|
||||||
|
updates["diameter_mm"] = *req.DiameterMM
|
||||||
|
}
|
||||||
|
if req.InitialGrams != nil {
|
||||||
|
updates["initial_grams"] = *req.InitialGrams
|
||||||
|
}
|
||||||
|
if req.RemainingGrams != nil {
|
||||||
|
updates["remaining_grams"] = *req.RemainingGrams
|
||||||
|
}
|
||||||
|
if req.SpoolWeightGrams != nil {
|
||||||
|
updates["spool_weight_grams"] = *req.SpoolWeightGrams
|
||||||
|
}
|
||||||
|
if req.CostUSD != nil {
|
||||||
|
updates["cost_usd"] = *req.CostUSD
|
||||||
|
}
|
||||||
|
if req.LowStockThresholdGrams != nil {
|
||||||
|
updates["low_stock_threshold_grams"] = *req.LowStockThresholdGrams
|
||||||
|
}
|
||||||
|
if req.Notes != nil {
|
||||||
|
updates["notes"] = *req.Notes
|
||||||
|
}
|
||||||
|
if req.Barcode != nil {
|
||||||
|
updates["barcode"] = *req.Barcode
|
||||||
|
}
|
||||||
|
return updates
|
||||||
|
}
|
||||||
50
backend/internal/handlers/health.go
Normal file
50
backend/internal/handlers/health.go
Normal file
@@ -0,0 +1,50 @@
|
|||||||
|
package handlers
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"encoding/json"
|
||||||
|
"log/slog"
|
||||||
|
"net/http"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"github.com/jackc/pgx/v5/pgxpool"
|
||||||
|
)
|
||||||
|
|
||||||
|
// HealthHandler provides a health check endpoint that verifies database connectivity.
|
||||||
|
type HealthHandler struct {
|
||||||
|
dbPool *pgxpool.Pool
|
||||||
|
}
|
||||||
|
|
||||||
|
// NewHealthHandler creates a new HealthHandler with the given database pool.
|
||||||
|
func NewHealthHandler(dbPool *pgxpool.Pool) *HealthHandler {
|
||||||
|
return &HealthHandler{dbPool: dbPool}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ServeHTTP handles GET /health requests.
|
||||||
|
func (h *HealthHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) {
|
||||||
|
ctx, cancel := context.WithTimeout(r.Context(), 5*time.Second)
|
||||||
|
defer cancel()
|
||||||
|
|
||||||
|
dbConnected := false
|
||||||
|
if h.dbPool != nil {
|
||||||
|
if err := h.dbPool.Ping(ctx); err == nil {
|
||||||
|
dbConnected = true
|
||||||
|
} else {
|
||||||
|
slog.Warn("health check db ping failed", "error", err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
resp := map[string]any{
|
||||||
|
"status": "ok",
|
||||||
|
"timestamp": time.Now().UTC().Format(time.RFC3339),
|
||||||
|
"db_connected": dbConnected,
|
||||||
|
}
|
||||||
|
|
||||||
|
w.Header().Set("Content-Type", "application/json")
|
||||||
|
if !dbConnected {
|
||||||
|
w.WriteHeader(http.StatusServiceUnavailable)
|
||||||
|
}
|
||||||
|
if err := json.NewEncoder(w).Encode(resp); err != nil {
|
||||||
|
slog.Error("failed to encode health response", "error", err)
|
||||||
|
}
|
||||||
|
}
|
||||||
51
backend/internal/handlers/helpers.go
Normal file
51
backend/internal/handlers/helpers.go
Normal file
@@ -0,0 +1,51 @@
|
|||||||
|
package handlers
|
||||||
|
|
||||||
|
import (
|
||||||
|
"encoding/json"
|
||||||
|
"log/slog"
|
||||||
|
"net/http"
|
||||||
|
"strconv"
|
||||||
|
|
||||||
|
"github.com/CubeCraft-Creations/Extrudex/backend/internal/dtos"
|
||||||
|
"github.com/CubeCraft-Creations/Extrudex/backend/internal/services"
|
||||||
|
)
|
||||||
|
|
||||||
|
// writeJSON serializes v as JSON to the response writer with the given status code.
|
||||||
|
// Logs an error if encoding fails.
|
||||||
|
func writeJSON(w http.ResponseWriter, status int, v interface{}) {
|
||||||
|
w.Header().Set("Content-Type", "application/json")
|
||||||
|
w.WriteHeader(status)
|
||||||
|
if err := json.NewEncoder(w).Encode(v); err != nil {
|
||||||
|
slog.Error("failed to encode JSON response", "error", err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// parsePagination reads limit and offset query parameters with defaults of 20 and 0.
|
||||||
|
func parsePagination(r *http.Request) (limit, offset int) {
|
||||||
|
limit = 20
|
||||||
|
offset = 0
|
||||||
|
|
||||||
|
if l := r.URL.Query().Get("limit"); l != "" {
|
||||||
|
if parsed, err := strconv.Atoi(l); err == nil && parsed > 0 {
|
||||||
|
limit = parsed
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if o := r.URL.Query().Get("offset"); o != "" {
|
||||||
|
if parsed, err := strconv.Atoi(o); err == nil && parsed >= 0 {
|
||||||
|
offset = parsed
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// ValidateCreateFilamentRequest validates a CreateFilamentRequest DTO.
|
||||||
|
// Re-exports the service-layer validator for handler use.
|
||||||
|
func ValidateCreateFilamentRequest(req dtos.CreateFilamentRequest) error {
|
||||||
|
return services.ValidateCreateFilamentRequest(req)
|
||||||
|
}
|
||||||
|
|
||||||
|
// ValidateUpdateFilamentRequest validates an UpdateFilamentRequest DTO.
|
||||||
|
// Re-exports the service-layer validator for handler use.
|
||||||
|
func ValidateUpdateFilamentRequest(req dtos.UpdateFilamentRequest) error {
|
||||||
|
return services.ValidateUpdateFilamentRequest(req)
|
||||||
|
}
|
||||||
34
backend/internal/handlers/material_handler.go
Normal file
34
backend/internal/handlers/material_handler.go
Normal file
@@ -0,0 +1,34 @@
|
|||||||
|
package handlers
|
||||||
|
|
||||||
|
import (
|
||||||
|
"log/slog"
|
||||||
|
"net/http"
|
||||||
|
|
||||||
|
"github.com/CubeCraft-Creations/Extrudex/backend/internal/dtos"
|
||||||
|
"github.com/CubeCraft-Creations/Extrudex/backend/internal/repositories"
|
||||||
|
)
|
||||||
|
|
||||||
|
// MaterialHandler handles requests for material lookup data.
|
||||||
|
type MaterialHandler struct {
|
||||||
|
repo *repositories.MaterialRepository
|
||||||
|
}
|
||||||
|
|
||||||
|
// NewMaterialHandler creates a MaterialHandler with the given repository.
|
||||||
|
func NewMaterialHandler(repo *repositories.MaterialRepository) *MaterialHandler {
|
||||||
|
return &MaterialHandler{repo: repo}
|
||||||
|
}
|
||||||
|
|
||||||
|
// List handles GET /api/materials — returns all material bases.
|
||||||
|
func (h *MaterialHandler) List(w http.ResponseWriter, r *http.Request) {
|
||||||
|
materials, err := h.repo.GetAll(r.Context())
|
||||||
|
if err != nil {
|
||||||
|
slog.Error("failed to list materials", "error", err)
|
||||||
|
writeJSON(w, http.StatusInternalServerError, dtos.ErrorResponse{
|
||||||
|
Error: "internal server error",
|
||||||
|
Code: http.StatusInternalServerError,
|
||||||
|
})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
writeJSON(w, http.StatusOK, dtos.SingleResponse{Data: materials})
|
||||||
|
}
|
||||||
60
backend/internal/handlers/print_job_handler.go
Normal file
60
backend/internal/handlers/print_job_handler.go
Normal file
@@ -0,0 +1,60 @@
|
|||||||
|
package handlers

import (
	"log/slog"
	"net/http"
	"strconv"

	"github.com/CubeCraft-Creations/Extrudex/backend/internal/dtos"
	"github.com/CubeCraft-Creations/Extrudex/backend/internal/repositories"
	"github.com/CubeCraft-Creations/Extrudex/backend/internal/services"
)

// PrintJobHandler serves HTTP endpoints for print job operations.
type PrintJobHandler struct {
	service *services.PrintJobService
}

// NewPrintJobHandler returns a PrintJobHandler backed by the given service.
func NewPrintJobHandler(service *services.PrintJobService) *PrintJobHandler {
	return &PrintJobHandler{service: service}
}

// List handles GET /api/print-jobs — returns paginated, filtered print jobs.
//
// Recognized query parameters: "status" (job status name), "printer_id"
// (integer; a non-numeric value yields 400), plus the standard
// limit/offset pagination pair read by parsePagination.
func (h *PrintJobHandler) List(w http.ResponseWriter, r *http.Request) {
	limit, offset := parsePagination(r)

	filter := repositories.PrintJobFilter{
		Status: r.URL.Query().Get("status"),
		Limit:  limit,
		Offset: offset,
	}

	if raw := r.URL.Query().Get("printer_id"); raw != "" {
		id, convErr := strconv.Atoi(raw)
		if convErr != nil {
			writeJSON(w, http.StatusBadRequest, dtos.ErrorResponse{
				Error: "invalid printer_id",
				Code:  http.StatusBadRequest,
			})
			return
		}
		filter.PrinterID = &id
	}

	jobs, total, err := h.service.List(r.Context(), filter)
	if err != nil {
		slog.Error("failed to list print jobs", "error", err)
		writeJSON(w, http.StatusInternalServerError, dtos.ErrorResponse{
			Error: "internal server error",
			Code:  http.StatusInternalServerError,
		})
		return
	}

	writeJSON(w, http.StatusOK, dtos.ListResponse{
		Data:   jobs,
		Total:  total,
		Limit:  limit,
		Offset: offset,
	})
}
|
||||||
34
backend/internal/handlers/printer_handler.go
Normal file
34
backend/internal/handlers/printer_handler.go
Normal file
@@ -0,0 +1,34 @@
|
|||||||
|
package handlers

import (
	"log/slog"
	"net/http"

	"github.com/CubeCraft-Creations/Extrudex/backend/internal/dtos"
	"github.com/CubeCraft-Creations/Extrudex/backend/internal/services"
)

// PrinterHandler serves HTTP endpoints for printer listings.
type PrinterHandler struct {
	service *services.PrinterService
}

// NewPrinterHandler returns a PrinterHandler backed by the given service.
func NewPrinterHandler(service *services.PrinterService) *PrinterHandler {
	return &PrinterHandler{service: service}
}

// List handles GET /api/printers — returns all printers with printer_type info.
// Errors from the service layer are logged and mapped to a generic 500.
func (h *PrinterHandler) List(w http.ResponseWriter, r *http.Request) {
	printers, err := h.service.List(r.Context())
	if err == nil {
		writeJSON(w, http.StatusOK, dtos.SingleResponse{Data: printers})
		return
	}

	slog.Error("failed to list printers", "error", err)
	writeJSON(w, http.StatusInternalServerError, dtos.ErrorResponse{
		Error: "internal server error",
		Code:  http.StatusInternalServerError,
	})
}
|
||||||
70
backend/internal/handlers/usage_log_handler.go
Normal file
70
backend/internal/handlers/usage_log_handler.go
Normal file
@@ -0,0 +1,70 @@
|
|||||||
|
package handlers

import (
	"log/slog"
	"net/http"
	"strconv"

	"github.com/CubeCraft-Creations/Extrudex/backend/internal/dtos"
	"github.com/CubeCraft-Creations/Extrudex/backend/internal/repositories"
)

// UsageLogHandler serves HTTP endpoints for usage log operations.
type UsageLogHandler struct {
	repo *repositories.UsageLogRepository
}

// NewUsageLogHandler returns a UsageLogHandler backed by the given repository.
func NewUsageLogHandler(repo *repositories.UsageLogRepository) *UsageLogHandler {
	return &UsageLogHandler{repo: repo}
}

// intQueryParam parses an optional integer query parameter.
// It returns (nil, true) when the parameter is absent, (&v, true) on
// success, and writes a 400 "invalid <name>" response and returns
// (nil, false) when the value is present but not an integer.
func intQueryParam(w http.ResponseWriter, r *http.Request, name string) (*int, bool) {
	raw := r.URL.Query().Get(name)
	if raw == "" {
		return nil, true
	}
	v, err := strconv.Atoi(raw)
	if err != nil {
		writeJSON(w, http.StatusBadRequest, dtos.ErrorResponse{
			Error: "invalid " + name,
			Code:  http.StatusBadRequest,
		})
		return nil, false
	}
	return &v, true
}

// List handles GET /api/usage-logs — returns paginated, filtered usage logs.
//
// Recognized query parameters: "spool_id" and "job_id" (both optional
// integers; non-numeric values yield 400), plus the standard
// limit/offset pagination pair.
func (h *UsageLogHandler) List(w http.ResponseWriter, r *http.Request) {
	limit, offset := parsePagination(r)
	filter := repositories.UsageLogFilter{
		Limit:  limit,
		Offset: offset,
	}

	spoolID, ok := intQueryParam(w, r, "spool_id")
	if !ok {
		return // 400 already written
	}
	filter.SpoolID = spoolID

	jobID, ok := intQueryParam(w, r, "job_id")
	if !ok {
		return // 400 already written
	}
	filter.JobID = jobID

	logs, total, err := h.repo.GetAll(r.Context(), filter)
	if err != nil {
		slog.Error("failed to list usage logs", "error", err)
		writeJSON(w, http.StatusInternalServerError, dtos.ErrorResponse{
			Error: "internal server error",
			Code:  http.StatusInternalServerError,
		})
		return
	}

	writeJSON(w, http.StatusOK, dtos.ListResponse{
		Data:   logs,
		Total:  total,
		Limit:  limit,
		Offset: offset,
	})
}
|
||||||
162
backend/internal/models/models.go
Normal file
162
backend/internal/models/models.go
Normal file
@@ -0,0 +1,162 @@
|
|||||||
|
// Package models defines the Extrudex domain model structs.
// Each struct maps 1:1 to a PostgreSQL table and serializes to
// snake_case JSON. Nullable columns use pointer fields so a missing
// value round-trips as JSON null/omitted; all timestamps are time.Time.
package models

import "time"

// ============================================================================
// Lookup Tables
// ============================================================================

// PrinterType represents a printer technology category (fdm, resin, etc.).
type PrinterType struct {
	ID        int       `json:"id"`
	Name      string    `json:"name"`
	CreatedAt time.Time `json:"created_at"`
	UpdatedAt time.Time `json:"updated_at"`
}

// JobStatus represents a print job lifecycle state.
type JobStatus struct {
	ID        int       `json:"id"`
	Name      string    `json:"name"`
	CreatedAt time.Time `json:"created_at"`
	UpdatedAt time.Time `json:"updated_at"`
}

// MaterialBase represents a base material type (PLA, PETG, ABS, etc.).
// Density and temperature ranges live here for grams-calculation and
// slicing guidance; the temperature bounds are optional per material.
type MaterialBase struct {
	ID               int       `json:"id"`
	Name             string    `json:"name"`
	DensityGCm3      float64   `json:"density_g_cm3"`
	ExtrusionTempMin *int      `json:"extrusion_temp_min,omitempty"`
	ExtrusionTempMax *int      `json:"extrusion_temp_max,omitempty"`
	BedTempMin       *int      `json:"bed_temp_min,omitempty"`
	BedTempMax       *int      `json:"bed_temp_max,omitempty"`
	CreatedAt        time.Time `json:"created_at"`
	UpdatedAt        time.Time `json:"updated_at"`
}

// MaterialFinish represents the visual/texture finish (Basic, Silk, Matte, etc.).
type MaterialFinish struct {
	ID          int       `json:"id"`
	Name        string    `json:"name"`
	Description *string   `json:"description,omitempty"`
	CreatedAt   time.Time `json:"created_at"`
	UpdatedAt   time.Time `json:"updated_at"`
}

// MaterialModifier represents an additive property (Carbon Fiber, Wood-Filled, etc.).
type MaterialModifier struct {
	ID          int       `json:"id"`
	Name        string    `json:"name"`
	Description *string   `json:"description,omitempty"`
	CreatedAt   time.Time `json:"created_at"`
	UpdatedAt   time.Time `json:"updated_at"`
}

// ============================================================================
// Core Entity Tables
// ============================================================================

// Printer represents a 3D printer in the fleet. Connection settings for
// Moonraker and MQTT are optional; PrinterType is populated only when the
// query JOINs the lookup table.
type Printer struct {
	ID              int          `json:"id"`
	Name            string       `json:"name"`
	PrinterTypeID   int          `json:"printer_type_id"`
	PrinterType     *PrinterType `json:"printer_type,omitempty"` // populated on JOIN queries
	Manufacturer    *string      `json:"manufacturer,omitempty"`
	Model           *string      `json:"model,omitempty"`
	MoonrakerURL    *string      `json:"moonraker_url,omitempty"`
	MoonrakerAPIKey *string      `json:"moonraker_api_key,omitempty"`
	MQTTBrokerHost  *string      `json:"mqtt_broker_host,omitempty"`
	MQTTTopicPrefix *string      `json:"mqtt_topic_prefix,omitempty"`
	MQTTTLSEnabled  bool         `json:"mqtt_tls_enabled"`
	IsActive        bool         `json:"is_active"`
	CreatedAt       time.Time    `json:"created_at"`
	UpdatedAt       time.Time    `json:"updated_at"`
}

// FilamentSpool represents a physical filament spool in inventory.
// material_finish_id defaults to 1 ("Basic"); material_modifier_id is
// optional. Grams are always physically measured values — grams_used is
// derived, not stored. The *Base/*Finish/*Modifier pointers are filled
// only by JOIN queries.
type FilamentSpool struct {
	ID                     int               `json:"id"`
	Name                   string            `json:"name"`
	MaterialBaseID         int               `json:"material_base_id"`
	MaterialBase           *MaterialBase     `json:"material_base,omitempty"` // JOIN
	MaterialFinishID       int               `json:"material_finish_id"`
	MaterialFinish         *MaterialFinish   `json:"material_finish,omitempty"` // JOIN
	MaterialModifierID     *int              `json:"material_modifier_id,omitempty"`
	MaterialModifier       *MaterialModifier `json:"material_modifier,omitempty"` // JOIN
	ColorHex               string            `json:"color_hex"`
	Brand                  *string           `json:"brand,omitempty"`
	DiameterMM             float64           `json:"diameter_mm"`
	InitialGrams           int               `json:"initial_grams"`
	RemainingGrams         int               `json:"remaining_grams"`
	SpoolWeightGrams       *int              `json:"spool_weight_grams,omitempty"`
	CostUSD                *float64          `json:"cost_usd,omitempty"`
	LowStockThresholdGrams int               `json:"low_stock_threshold_grams"`
	Notes                  *string           `json:"notes,omitempty"`
	Barcode                *string           `json:"barcode,omitempty"`
	DeletedAt              *time.Time        `json:"deleted_at,omitempty"` // soft delete marker
	CreatedAt              time.Time         `json:"created_at"`
	UpdatedAt              time.Time         `json:"updated_at"`
}

// PrintJob represents a single print on a specific printer.
// The filament_spool_id is a convenience reference; multi-spool jobs
// track per-spool usage in usage_logs.
type PrintJob struct {
	ID                       int            `json:"id"`
	PrinterID                int            `json:"printer_id"`
	Printer                  *Printer       `json:"printer,omitempty"` // JOIN
	FilamentSpoolID          *int           `json:"filament_spool_id,omitempty"`
	FilamentSpool            *FilamentSpool `json:"filament_spool,omitempty"` // JOIN
	JobName                  string         `json:"job_name"`
	FileName                 *string        `json:"file_name,omitempty"`
	JobStatusID              int            `json:"job_status_id"`
	JobStatus                *JobStatus     `json:"job_status,omitempty"` // JOIN
	StartedAt                *time.Time     `json:"started_at,omitempty"`
	CompletedAt              *time.Time     `json:"completed_at,omitempty"`
	DurationSeconds          *int           `json:"duration_seconds,omitempty"`
	EstimatedDurationSeconds *int           `json:"estimated_duration_seconds,omitempty"`
	TotalMMExtruded          *float64       `json:"total_mm_extruded,omitempty"`
	TotalGramsUsed           *float64       `json:"total_grams_used,omitempty"`
	TotalCostUSD             *float64       `json:"total_cost_usd,omitempty"`
	Notes                    *string        `json:"notes,omitempty"`
	DeletedAt                *time.Time     `json:"deleted_at,omitempty"` // soft delete marker
	CreatedAt                time.Time      `json:"created_at"`
	UpdatedAt                time.Time      `json:"updated_at"`
}

// UsageLog records filament consumption for a specific spool during a
// print job. This is the atomic unit of filament tracking — grams are
// derived from mm_extruded.
type UsageLog struct {
	ID              int            `json:"id"`
	PrintJobID      int            `json:"print_job_id"`
	PrintJob        *PrintJob      `json:"print_job,omitempty"` // JOIN
	FilamentSpoolID int            `json:"filament_spool_id"`
	FilamentSpool   *FilamentSpool `json:"filament_spool,omitempty"` // JOIN
	MMExtruded      float64        `json:"mm_extruded"`
	GramsUsed       float64        `json:"grams_used"`
	CostUSD         *float64       `json:"cost_usd,omitempty"`
	LoggedAt        time.Time      `json:"logged_at"`
	CreatedAt       time.Time      `json:"created_at"`
}

// ============================================================================
// Application Settings
// ============================================================================

// Setting represents a key-value application configuration entry.
// The value is stored as JSONB in PostgreSQL, allowing flexible typed config.
//
// NOTE(review): Value is []byte tagged json:"value" — encoding/json
// marshals a []byte as a base64 string, not as raw JSON. If API clients
// expect the JSONB payload verbatim, this likely should be
// json.RawMessage — confirm against consumers before changing.
type Setting struct {
	ID          int       `json:"id"`
	Key         string    `json:"key"`
	Value       []byte    `json:"value"` // raw JSON — marshalled/unmarshalled by caller
	Description *string   `json:"description,omitempty"`
	CreatedAt   time.Time `json:"created_at"`
	UpdatedAt   time.Time `json:"updated_at"`
}
|
||||||
285
backend/internal/repositories/filament_repository.go
Normal file
285
backend/internal/repositories/filament_repository.go
Normal file
@@ -0,0 +1,285 @@
|
|||||||
|
package repositories

import (
	"context"
	"fmt"
	"strings"
	"time"

	"github.com/CubeCraft-Creations/Extrudex/backend/internal/models"
	"github.com/jackc/pgx/v5/pgxpool"
)

// FilamentRepository handles database queries for filament_spools.
type FilamentRepository struct {
	pool *pgxpool.Pool
}

// NewFilamentRepository creates a FilamentRepository backed by the given pool.
func NewFilamentRepository(pool *pgxpool.Pool) *FilamentRepository {
	return &FilamentRepository{pool: pool}
}

// FilamentFilter holds query parameters for listing filament spools.
// Zero-valued string fields mean "no filter"; LowStock adds a
// remaining-vs-threshold predicate.
type FilamentFilter struct {
	Material string // filter by material_base name (case-insensitive)
	Finish   string // filter by material_finish name (case-insensitive)
	Color    string // filter by exact color_hex match
	LowStock bool   // if true, filter for remaining_grams <= low_stock_threshold_grams
	Limit    int
	Offset   int
}

// spoolScanFields is the shared SELECT column list for filament spools
// with JOINed lookup tables; its order must match scanSpoolWithJoins.
const spoolScanFields = `
	s.id, s.name,
	s.material_base_id,
	COALESCE(mb.name, '') as material_base_name,
	COALESCE(mb.density_g_cm3, 0) as material_base_density_g_cm3,
	COALESCE(mb.extrusion_temp_min, NULL::int) as material_base_extrusion_temp_min,
	COALESCE(mb.extrusion_temp_max, NULL::int) as material_base_extrusion_temp_max,
	COALESCE(mb.bed_temp_min, NULL::int) as material_base_bed_temp_min,
	COALESCE(mb.bed_temp_max, NULL::int) as material_base_bed_temp_max,
	COALESCE(mb.created_at, s.created_at) as material_base_created_at,
	COALESCE(mb.updated_at, s.created_at) as material_base_updated_at,
	s.material_finish_id,
	COALESCE(mf.name, '') as material_finish_name,
	mf.description as material_finish_description,
	COALESCE(mf.created_at, s.created_at) as material_finish_created_at,
	COALESCE(mf.updated_at, s.created_at) as material_finish_updated_at,
	s.material_modifier_id,
	mm.name as material_modifier_name,
	mm.description as material_modifier_description,
	mm.created_at as material_modifier_created_at,
	mm.updated_at as material_modifier_updated_at,
	s.color_hex, s.brand, s.diameter_mm,
	s.initial_grams, s.remaining_grams, s.spool_weight_grams,
	s.cost_usd, s.low_stock_threshold_grams,
	s.notes, s.barcode,
	s.deleted_at, s.created_at, s.updated_at`

// spoolFromJoins is the shared FROM/JOIN clause matching spoolScanFields.
const spoolFromJoins = `
	FROM filament_spools s
	LEFT JOIN material_bases mb ON s.material_base_id = mb.id
	LEFT JOIN material_finishes mf ON s.material_finish_id = mf.id
	LEFT JOIN material_modifiers mm ON s.material_modifier_id = mm.id`
|
||||||
|
|
||||||
|
// scanSpoolWithJoins scans a full spool row including all JOINed tables.
|
||||||
|
func scanSpoolWithJoins(row interface{ Scan(...interface{}) error }) (models.FilamentSpool, error) {
|
||||||
|
var s models.FilamentSpool
|
||||||
|
var mb models.MaterialBase
|
||||||
|
var mf models.MaterialFinish
|
||||||
|
var mfDesc *string
|
||||||
|
var modifierID *int
|
||||||
|
var modName, modDesc *string
|
||||||
|
var modCreatedAt, modUpdatedAt *time.Time
|
||||||
|
|
||||||
|
err := row.Scan(
|
||||||
|
&s.ID, &s.Name,
|
||||||
|
&s.MaterialBaseID,
|
||||||
|
&mb.Name, &mb.DensityGCm3,
|
||||||
|
&mb.ExtrusionTempMin, &mb.ExtrusionTempMax,
|
||||||
|
&mb.BedTempMin, &mb.BedTempMax,
|
||||||
|
&mb.CreatedAt, &mb.UpdatedAt,
|
||||||
|
&s.MaterialFinishID,
|
||||||
|
&mf.Name, &mfDesc,
|
||||||
|
&mf.CreatedAt, &mf.UpdatedAt,
|
||||||
|
&modifierID,
|
||||||
|
&modName, &modDesc,
|
||||||
|
&modCreatedAt, &modUpdatedAt,
|
||||||
|
&s.ColorHex, &s.Brand, &s.DiameterMM,
|
||||||
|
&s.InitialGrams, &s.RemainingGrams, &s.SpoolWeightGrams,
|
||||||
|
&s.CostUSD, &s.LowStockThresholdGrams,
|
||||||
|
&s.Notes, &s.Barcode,
|
||||||
|
&s.DeletedAt, &s.CreatedAt, &s.UpdatedAt,
|
||||||
|
)
|
||||||
|
if err != nil {
|
||||||
|
return s, err
|
||||||
|
}
|
||||||
|
|
||||||
|
mb.ID = s.MaterialBaseID
|
||||||
|
s.MaterialBase = &mb
|
||||||
|
|
||||||
|
mf.ID = s.MaterialFinishID
|
||||||
|
if mfDesc != nil {
|
||||||
|
mf.Description = mfDesc
|
||||||
|
}
|
||||||
|
s.MaterialFinish = &mf
|
||||||
|
|
||||||
|
s.MaterialModifierID = modifierID
|
||||||
|
if modifierID != nil && modName != nil {
|
||||||
|
mm := models.MaterialModifier{
|
||||||
|
ID: *modifierID,
|
||||||
|
Name: *modName,
|
||||||
|
}
|
||||||
|
if modDesc != nil {
|
||||||
|
mm.Description = modDesc
|
||||||
|
}
|
||||||
|
if modCreatedAt != nil {
|
||||||
|
mm.CreatedAt = *modCreatedAt
|
||||||
|
}
|
||||||
|
if modUpdatedAt != nil {
|
||||||
|
mm.UpdatedAt = *modUpdatedAt
|
||||||
|
}
|
||||||
|
s.MaterialModifier = &mm
|
||||||
|
}
|
||||||
|
|
||||||
|
return s, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetAll returns filament spools matching the given filters, with pagination.
|
||||||
|
// Returns results, total matching count, and any error.
|
||||||
|
func (r *FilamentRepository) GetAll(ctx context.Context, filter FilamentFilter) ([]models.FilamentSpool, int, error) {
|
||||||
|
conditions := []string{"s.deleted_at IS NULL"}
|
||||||
|
args := []interface{}{}
|
||||||
|
argIdx := 1
|
||||||
|
|
||||||
|
if filter.Material != "" {
|
||||||
|
conditions = append(conditions, fmt.Sprintf("LOWER(mb.name) = LOWER($%d)", argIdx))
|
||||||
|
args = append(args, filter.Material)
|
||||||
|
argIdx++
|
||||||
|
}
|
||||||
|
if filter.Finish != "" {
|
||||||
|
conditions = append(conditions, fmt.Sprintf("LOWER(mf.name) = LOWER($%d)", argIdx))
|
||||||
|
args = append(args, filter.Finish)
|
||||||
|
argIdx++
|
||||||
|
}
|
||||||
|
if filter.Color != "" {
|
||||||
|
conditions = append(conditions, fmt.Sprintf("s.color_hex = $%d", argIdx))
|
||||||
|
args = append(args, filter.Color)
|
||||||
|
argIdx++
|
||||||
|
}
|
||||||
|
if filter.LowStock {
|
||||||
|
conditions = append(conditions, "s.remaining_grams <= s.low_stock_threshold_grams")
|
||||||
|
}
|
||||||
|
|
||||||
|
whereClause := ""
|
||||||
|
if len(conditions) > 0 {
|
||||||
|
whereClause = "WHERE " + strings.Join(conditions, " AND ")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Count total.
|
||||||
|
var total int
|
||||||
|
countQuery := "SELECT COUNT(*) " + spoolFromJoins + " " + whereClause
|
||||||
|
if err := r.pool.QueryRow(ctx, countQuery, args...).Scan(&total); err != nil {
|
||||||
|
return nil, 0, err
|
||||||
|
}
|
||||||
|
|
||||||
|
// Query with pagination.
|
||||||
|
dataQuery := "SELECT " + spoolScanFields + " " + spoolFromJoins + " " +
|
||||||
|
whereClause +
|
||||||
|
" ORDER BY s.name ASC" +
|
||||||
|
fmt.Sprintf(" LIMIT $%d OFFSET $%d", argIdx, argIdx+1)
|
||||||
|
|
||||||
|
dataArgs := make([]interface{}, len(args))
|
||||||
|
copy(dataArgs, args)
|
||||||
|
dataArgs = append(dataArgs, filter.Limit, filter.Offset)
|
||||||
|
|
||||||
|
rows, err := r.pool.Query(ctx, dataQuery, dataArgs...)
|
||||||
|
if err != nil {
|
||||||
|
return nil, 0, err
|
||||||
|
}
|
||||||
|
defer rows.Close()
|
||||||
|
|
||||||
|
var spools []models.FilamentSpool
|
||||||
|
for rows.Next() {
|
||||||
|
s, err := scanSpoolWithJoins(rows)
|
||||||
|
if err != nil {
|
||||||
|
return nil, 0, err
|
||||||
|
}
|
||||||
|
spools = append(spools, s)
|
||||||
|
}
|
||||||
|
if err := rows.Err(); err != nil {
|
||||||
|
return nil, 0, err
|
||||||
|
}
|
||||||
|
if spools == nil {
|
||||||
|
spools = []models.FilamentSpool{}
|
||||||
|
}
|
||||||
|
|
||||||
|
return spools, total, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetByID returns a single filament spool by ID with JOINed data.
|
||||||
|
// Returns nil if not found or soft-deleted.
|
||||||
|
func (r *FilamentRepository) GetByID(ctx context.Context, id int) (*models.FilamentSpool, error) {
|
||||||
|
query := "SELECT " + spoolScanFields + " " + spoolFromJoins +
|
||||||
|
" WHERE s.id = $1 AND s.deleted_at IS NULL"
|
||||||
|
|
||||||
|
row := r.pool.QueryRow(ctx, query, id)
|
||||||
|
s, err := scanSpoolWithJoins(row)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
return &s, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Create inserts a new filament spool and returns the created spool with JOINed data.
|
||||||
|
func (r *FilamentRepository) Create(ctx context.Context, spool *models.FilamentSpool) (*models.FilamentSpool, error) {
|
||||||
|
var id int
|
||||||
|
err := r.pool.QueryRow(ctx, `
|
||||||
|
INSERT INTO filament_spools (
|
||||||
|
name, material_base_id, material_finish_id, material_modifier_id,
|
||||||
|
color_hex, brand, diameter_mm, initial_grams, remaining_grams,
|
||||||
|
spool_weight_grams, cost_usd, low_stock_threshold_grams,
|
||||||
|
notes, barcode
|
||||||
|
) VALUES ($1,$2,$3,$4,$5,$6,$7,$8,$9,$10,$11,$12,$13,$14)
|
||||||
|
RETURNING id
|
||||||
|
`,
|
||||||
|
spool.Name, spool.MaterialBaseID, spool.MaterialFinishID, spool.MaterialModifierID,
|
||||||
|
spool.ColorHex, spool.Brand, spool.DiameterMM, spool.InitialGrams, spool.RemainingGrams,
|
||||||
|
spool.SpoolWeightGrams, spool.CostUSD, spool.LowStockThresholdGrams,
|
||||||
|
spool.Notes, spool.Barcode,
|
||||||
|
).Scan(&id)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
return r.GetByID(ctx, id)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Update applies partial updates to an existing filament spool.
|
||||||
|
// Only non-nil fields in the update map are applied.
|
||||||
|
// Returns the updated spool.
|
||||||
|
func (r *FilamentRepository) Update(ctx context.Context, id int, updates map[string]interface{}) (*models.FilamentSpool, error) {
|
||||||
|
if len(updates) == 0 {
|
||||||
|
return r.GetByID(ctx, id)
|
||||||
|
}
|
||||||
|
|
||||||
|
setClauses := []string{"updated_at = NOW()"}
|
||||||
|
args := []interface{}{}
|
||||||
|
argIdx := 1
|
||||||
|
|
||||||
|
for col, val := range updates {
|
||||||
|
setClauses = append(setClauses, fmt.Sprintf("%s = $%d", col, argIdx))
|
||||||
|
args = append(args, val)
|
||||||
|
argIdx++
|
||||||
|
}
|
||||||
|
|
||||||
|
args = append(args, id)
|
||||||
|
query := fmt.Sprintf("UPDATE filament_spools SET %s WHERE id = $%d AND deleted_at IS NULL",
|
||||||
|
strings.Join(setClauses, ", "), argIdx)
|
||||||
|
|
||||||
|
result, err := r.pool.Exec(ctx, query, args...)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
if result.RowsAffected() == 0 {
|
||||||
|
return nil, nil // not found or deleted
|
||||||
|
}
|
||||||
|
|
||||||
|
return r.GetByID(ctx, id)
|
||||||
|
}
|
||||||
|
|
||||||
|
// SoftDelete marks a filament spool as deleted by setting deleted_at = NOW().
|
||||||
|
// Returns true if a row was affected.
|
||||||
|
func (r *FilamentRepository) SoftDelete(ctx context.Context, id int) (bool, error) {
|
||||||
|
result, err := r.pool.Exec(ctx, `
|
||||||
|
UPDATE filament_spools
|
||||||
|
SET deleted_at = NOW(), updated_at = NOW()
|
||||||
|
WHERE id = $1 AND deleted_at IS NULL
|
||||||
|
`, id)
|
||||||
|
if err != nil {
|
||||||
|
return false, err
|
||||||
|
}
|
||||||
|
return result.RowsAffected() > 0, nil
|
||||||
|
}
|
||||||
54
backend/internal/repositories/material_repository.go
Normal file
54
backend/internal/repositories/material_repository.go
Normal file
@@ -0,0 +1,54 @@
|
|||||||
|
// Package repositories provides data access logic backed by PostgreSQL via pgxpool.
package repositories

import (
	"context"

	"github.com/CubeCraft-Creations/Extrudex/backend/internal/models"
	"github.com/jackc/pgx/v5/pgxpool"
)

// MaterialRepository handles database queries for material lookup tables.
type MaterialRepository struct {
	pool *pgxpool.Pool
}

// NewMaterialRepository creates a MaterialRepository backed by the given pool.
func NewMaterialRepository(pool *pgxpool.Pool) *MaterialRepository {
	return &MaterialRepository{pool: pool}
}

// GetAll returns all material bases ordered by name. The result is
// never nil, so it JSON-encodes as [] rather than null.
func (r *MaterialRepository) GetAll(ctx context.Context) ([]models.MaterialBase, error) {
	const listSQL = `
	SELECT id, name, density_g_cm3, extrusion_temp_min, extrusion_temp_max,
	       bed_temp_min, bed_temp_max, created_at, updated_at
	FROM material_bases
	ORDER BY name
	`

	rows, err := r.pool.Query(ctx, listSQL)
	if err != nil {
		return nil, err
	}
	defer rows.Close()

	materials := []models.MaterialBase{}
	for rows.Next() {
		var base models.MaterialBase
		scanErr := rows.Scan(
			&base.ID, &base.Name, &base.DensityGCm3,
			&base.ExtrusionTempMin, &base.ExtrusionTempMax,
			&base.BedTempMin, &base.BedTempMax,
			&base.CreatedAt, &base.UpdatedAt,
		)
		if scanErr != nil {
			return nil, scanErr
		}
		materials = append(materials, base)
	}
	if err := rows.Err(); err != nil {
		return nil, err
	}

	return materials, nil
}
|
||||||
157
backend/internal/repositories/print_job_repository.go
Normal file
157
backend/internal/repositories/print_job_repository.go
Normal file
@@ -0,0 +1,157 @@
|
|||||||
|
package repositories

import (
	"context"
	"fmt"
	"strings"

	"github.com/CubeCraft-Creations/Extrudex/backend/internal/models"
	"github.com/jackc/pgx/v5/pgxpool"
)

// PrintJobRepository handles database queries for print_jobs.
type PrintJobRepository struct {
	pool *pgxpool.Pool
}

// NewPrintJobRepository creates a PrintJobRepository backed by the given pool.
func NewPrintJobRepository(pool *pgxpool.Pool) *PrintJobRepository {
	return &PrintJobRepository{pool: pool}
}

// PrintJobFilter holds query parameters for listing print jobs.
// Zero-valued fields mean "no filter".
type PrintJobFilter struct {
	Status    string // filter by job_status name (case-insensitive)
	PrinterID *int   // filter by printer_id
	Limit     int
	Offset    int
}
|
||||||
|
|
||||||
|
// scanPrintJobWithJoins scans a print_job row with JOINed tables.
|
||||||
|
func (r *PrintJobRepository) scanPrintJobWithJoins(row interface{ Scan(...interface{}) error }) (models.PrintJob, error) {
|
||||||
|
var pj models.PrintJob
|
||||||
|
var js models.JobStatus
|
||||||
|
|
||||||
|
err := row.Scan(
|
||||||
|
&pj.ID, &pj.PrinterID, &pj.FilamentSpoolID,
|
||||||
|
&pj.JobName, &pj.FileName,
|
||||||
|
&pj.JobStatusID,
|
||||||
|
&pj.StartedAt, &pj.CompletedAt,
|
||||||
|
&pj.DurationSeconds, &pj.EstimatedDurationSeconds,
|
||||||
|
&pj.TotalMMExtruded, &pj.TotalGramsUsed, &pj.TotalCostUSD,
|
||||||
|
&pj.Notes,
|
||||||
|
&pj.DeletedAt, &pj.CreatedAt, &pj.UpdatedAt,
|
||||||
|
&js.ID, &js.Name,
|
||||||
|
&js.CreatedAt, &js.UpdatedAt,
|
||||||
|
)
|
||||||
|
if err != nil {
|
||||||
|
return pj, err
|
||||||
|
}
|
||||||
|
|
||||||
|
pj.JobStatus = &js
|
||||||
|
return pj, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetAll returns print jobs matching the given filters, with pagination.
|
||||||
|
func (r *PrintJobRepository) GetAll(ctx context.Context, filter PrintJobFilter) ([]models.PrintJob, int, error) {
|
||||||
|
conditions := []string{"pj.deleted_at IS NULL"}
|
||||||
|
args := []interface{}{}
|
||||||
|
argIdx := 1
|
||||||
|
|
||||||
|
if filter.Status != "" {
|
||||||
|
conditions = append(conditions, fmt.Sprintf("LOWER(js.name) = LOWER($%d)", argIdx))
|
||||||
|
args = append(args, filter.Status)
|
||||||
|
argIdx++
|
||||||
|
}
|
||||||
|
if filter.PrinterID != nil {
|
||||||
|
conditions = append(conditions, fmt.Sprintf("pj.printer_id = $%d", argIdx))
|
||||||
|
args = append(args, *filter.PrinterID)
|
||||||
|
argIdx++
|
||||||
|
}
|
||||||
|
|
||||||
|
whereClause := ""
|
||||||
|
if len(conditions) > 0 {
|
||||||
|
whereClause = "WHERE " + strings.Join(conditions, " AND ")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Count.
|
||||||
|
var total int
|
||||||
|
countQuery := `SELECT COUNT(*)
|
||||||
|
FROM print_jobs pj
|
||||||
|
LEFT JOIN job_statuses js ON pj.job_status_id = js.id
|
||||||
|
` + " " + whereClause
|
||||||
|
if err := r.pool.QueryRow(ctx, countQuery, args...).Scan(&total); err != nil {
|
||||||
|
return nil, 0, err
|
||||||
|
}
|
||||||
|
|
||||||
|
// Query with pagination.
|
||||||
|
dataQuery := `SELECT
|
||||||
|
pj.id, pj.printer_id, pj.filament_spool_id,
|
||||||
|
pj.job_name, pj.file_name,
|
||||||
|
pj.job_status_id,
|
||||||
|
pj.started_at, pj.completed_at,
|
||||||
|
pj.duration_seconds, pj.estimated_duration_seconds,
|
||||||
|
pj.total_mm_extruded, pj.total_grams_used, pj.total_cost_usd,
|
||||||
|
pj.notes,
|
||||||
|
pj.deleted_at, pj.created_at, pj.updated_at,
|
||||||
|
js.id, js.name,
|
||||||
|
js.created_at, js.updated_at
|
||||||
|
FROM print_jobs pj
|
||||||
|
LEFT JOIN job_statuses js ON pj.job_status_id = js.id
|
||||||
|
` + whereClause +
|
||||||
|
" ORDER BY pj.created_at DESC" +
|
||||||
|
fmt.Sprintf(" LIMIT $%d OFFSET $%d", argIdx, argIdx+1)
|
||||||
|
|
||||||
|
dataArgs := make([]interface{}, len(args))
|
||||||
|
copy(dataArgs, args)
|
||||||
|
dataArgs = append(dataArgs, filter.Limit, filter.Offset)
|
||||||
|
|
||||||
|
rows, err := r.pool.Query(ctx, dataQuery, dataArgs...)
|
||||||
|
if err != nil {
|
||||||
|
return nil, 0, err
|
||||||
|
}
|
||||||
|
defer rows.Close()
|
||||||
|
|
||||||
|
var jobs []models.PrintJob
|
||||||
|
for rows.Next() {
|
||||||
|
pj, err := r.scanPrintJobWithJoins(rows)
|
||||||
|
if err != nil {
|
||||||
|
return nil, 0, err
|
||||||
|
}
|
||||||
|
jobs = append(jobs, pj)
|
||||||
|
}
|
||||||
|
if err := rows.Err(); err != nil {
|
||||||
|
return nil, 0, err
|
||||||
|
}
|
||||||
|
if jobs == nil {
|
||||||
|
jobs = []models.PrintJob{}
|
||||||
|
}
|
||||||
|
|
||||||
|
return jobs, total, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetByID returns a single print job by ID with JOINed job_status.
|
||||||
|
func (r *PrintJobRepository) GetByID(ctx context.Context, id int) (*models.PrintJob, error) {
|
||||||
|
row := r.pool.QueryRow(ctx, `
|
||||||
|
SELECT
|
||||||
|
pj.id, pj.printer_id, pj.filament_spool_id,
|
||||||
|
pj.job_name, pj.file_name,
|
||||||
|
pj.job_status_id,
|
||||||
|
pj.started_at, pj.completed_at,
|
||||||
|
pj.duration_seconds, pj.estimated_duration_seconds,
|
||||||
|
pj.total_mm_extruded, pj.total_grams_used, pj.total_cost_usd,
|
||||||
|
pj.notes,
|
||||||
|
pj.deleted_at, pj.created_at, pj.updated_at,
|
||||||
|
js.id, js.name,
|
||||||
|
js.created_at, js.updated_at
|
||||||
|
FROM print_jobs pj
|
||||||
|
LEFT JOIN job_statuses js ON pj.job_status_id = js.id
|
||||||
|
WHERE pj.id = $1 AND pj.deleted_at IS NULL
|
||||||
|
`, id)
|
||||||
|
|
||||||
|
pj, err := r.scanPrintJobWithJoins(row)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
return &pj, nil
|
||||||
|
}
|
||||||
78
backend/internal/repositories/printer_repository.go
Normal file
78
backend/internal/repositories/printer_repository.go
Normal file
@@ -0,0 +1,78 @@
|
|||||||
|
package repositories
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
|
||||||
|
"github.com/CubeCraft-Creations/Extrudex/backend/internal/models"
|
||||||
|
"github.com/jackc/pgx/v5/pgxpool"
|
||||||
|
)
|
||||||
|
|
||||||
|
// PrinterRepository handles database queries for printers.
// It holds no per-request state of its own; all reads go through the
// shared pgx connection pool.
type PrinterRepository struct {
	pool *pgxpool.Pool // shared connection pool, owned by the caller
}

// NewPrinterRepository creates a PrinterRepository backed by the given pool.
// The repository never closes the pool; its lifecycle belongs to the caller.
func NewPrinterRepository(pool *pgxpool.Pool) *PrinterRepository {
	return &PrinterRepository{pool: pool}
}
|
||||||
|
|
||||||
|
// scanPrinterWithType scans a printer row with JOINed printer_type.
|
||||||
|
func (r *PrinterRepository) scanPrinterWithType(row interface{ Scan(...interface{}) error }) (models.Printer, error) {
|
||||||
|
var p models.Printer
|
||||||
|
var pt models.PrinterType
|
||||||
|
|
||||||
|
err := row.Scan(
|
||||||
|
&p.ID, &p.Name, &p.PrinterTypeID,
|
||||||
|
&p.Manufacturer, &p.Model,
|
||||||
|
&p.MoonrakerURL, &p.MoonrakerAPIKey,
|
||||||
|
&p.MQTTBrokerHost, &p.MQTTTopicPrefix,
|
||||||
|
&p.MQTTTLSEnabled, &p.IsActive,
|
||||||
|
&p.CreatedAt, &p.UpdatedAt,
|
||||||
|
&pt.ID, &pt.Name,
|
||||||
|
&pt.CreatedAt, &pt.UpdatedAt,
|
||||||
|
)
|
||||||
|
if err != nil {
|
||||||
|
return p, err
|
||||||
|
}
|
||||||
|
|
||||||
|
p.PrinterType = &pt
|
||||||
|
return p, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetAll returns all printers joined with their printer_type, ordered by name.
|
||||||
|
func (r *PrinterRepository) GetAll(ctx context.Context) ([]models.Printer, error) {
|
||||||
|
rows, err := r.pool.Query(ctx, `
|
||||||
|
SELECT p.id, p.name, p.printer_type_id,
|
||||||
|
p.manufacturer, p.model,
|
||||||
|
p.moonraker_url, p.moonraker_api_key,
|
||||||
|
p.mqtt_broker_host, p.mqtt_topic_prefix,
|
||||||
|
p.mqtt_tls_enabled, p.is_active,
|
||||||
|
p.created_at, p.updated_at,
|
||||||
|
pt.id, pt.name,
|
||||||
|
pt.created_at, pt.updated_at
|
||||||
|
FROM printers p
|
||||||
|
JOIN printer_types pt ON p.printer_type_id = pt.id
|
||||||
|
ORDER BY p.name
|
||||||
|
`)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
defer rows.Close()
|
||||||
|
|
||||||
|
var printers []models.Printer
|
||||||
|
for rows.Next() {
|
||||||
|
p, err := r.scanPrinterWithType(rows)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
printers = append(printers, p)
|
||||||
|
}
|
||||||
|
if err := rows.Err(); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
if printers == nil {
|
||||||
|
printers = []models.Printer{}
|
||||||
|
}
|
||||||
|
return printers, nil
|
||||||
|
}
|
||||||
96
backend/internal/repositories/usage_log_repository.go
Normal file
96
backend/internal/repositories/usage_log_repository.go
Normal file
@@ -0,0 +1,96 @@
|
|||||||
|
package repositories
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"fmt"
|
||||||
|
|
||||||
|
"github.com/CubeCraft-Creations/Extrudex/backend/internal/models"
|
||||||
|
"github.com/jackc/pgx/v5/pgxpool"
|
||||||
|
)
|
||||||
|
|
||||||
|
// UsageLogRepository handles database queries for usage_logs.
type UsageLogRepository struct {
	pool *pgxpool.Pool // shared connection pool, owned by the caller
}

// NewUsageLogRepository creates a UsageLogRepository backed by the given pool.
// The repository never closes the pool; its lifecycle belongs to the caller.
func NewUsageLogRepository(pool *pgxpool.Pool) *UsageLogRepository {
	return &UsageLogRepository{pool: pool}
}
|
||||||
|
|
||||||
|
// UsageLogFilter holds query parameters for listing usage logs.
// Nil pointer fields mean "no filter on this column".
type UsageLogFilter struct {
	SpoolID *int // filter by filament_spool_id
	JobID   *int // filter by print_job_id
	Limit   int  // page size, passed straight to SQL LIMIT
	Offset  int  // rows to skip, passed straight to SQL OFFSET
}
|
||||||
|
|
||||||
|
// GetAll returns usage logs matching the given filters, with pagination.
|
||||||
|
func (r *UsageLogRepository) GetAll(ctx context.Context, filter UsageLogFilter) ([]models.UsageLog, int, error) {
|
||||||
|
conditions := []string{"1=1"}
|
||||||
|
args := []interface{}{}
|
||||||
|
argIdx := 1
|
||||||
|
|
||||||
|
if filter.SpoolID != nil {
|
||||||
|
conditions = append(conditions, fmt.Sprintf("ul.filament_spool_id = $%d", argIdx))
|
||||||
|
args = append(args, *filter.SpoolID)
|
||||||
|
argIdx++
|
||||||
|
}
|
||||||
|
if filter.JobID != nil {
|
||||||
|
conditions = append(conditions, fmt.Sprintf("ul.print_job_id = $%d", argIdx))
|
||||||
|
args = append(args, *filter.JobID)
|
||||||
|
argIdx++
|
||||||
|
}
|
||||||
|
|
||||||
|
whereClause := "WHERE " + fmt.Sprintf("%s", conditions[0])
|
||||||
|
for _, c := range conditions[1:] {
|
||||||
|
whereClause += " AND " + c
|
||||||
|
}
|
||||||
|
|
||||||
|
// Count.
|
||||||
|
var total int
|
||||||
|
countQuery := "SELECT COUNT(*) FROM usage_logs ul " + whereClause
|
||||||
|
if err := r.pool.QueryRow(ctx, countQuery, args...).Scan(&total); err != nil {
|
||||||
|
return nil, 0, err
|
||||||
|
}
|
||||||
|
|
||||||
|
// Query with pagination.
|
||||||
|
dataQuery := `SELECT id, print_job_id, filament_spool_id, mm_extruded,
|
||||||
|
grams_used, cost_usd, logged_at, created_at
|
||||||
|
FROM usage_logs ul
|
||||||
|
` + whereClause +
|
||||||
|
" ORDER BY ul.logged_at DESC" +
|
||||||
|
fmt.Sprintf(" LIMIT $%d OFFSET $%d", argIdx, argIdx+1)
|
||||||
|
|
||||||
|
dataArgs := make([]interface{}, len(args))
|
||||||
|
copy(dataArgs, args)
|
||||||
|
dataArgs = append(dataArgs, filter.Limit, filter.Offset)
|
||||||
|
|
||||||
|
rows, err := r.pool.Query(ctx, dataQuery, dataArgs...)
|
||||||
|
if err != nil {
|
||||||
|
return nil, 0, err
|
||||||
|
}
|
||||||
|
defer rows.Close()
|
||||||
|
|
||||||
|
var logs []models.UsageLog
|
||||||
|
for rows.Next() {
|
||||||
|
var l models.UsageLog
|
||||||
|
if err := rows.Scan(
|
||||||
|
&l.ID, &l.PrintJobID, &l.FilamentSpoolID,
|
||||||
|
&l.MMExtruded, &l.GramsUsed, &l.CostUSD,
|
||||||
|
&l.LoggedAt, &l.CreatedAt,
|
||||||
|
); err != nil {
|
||||||
|
return nil, 0, err
|
||||||
|
}
|
||||||
|
logs = append(logs, l)
|
||||||
|
}
|
||||||
|
if err := rows.Err(); err != nil {
|
||||||
|
return nil, 0, err
|
||||||
|
}
|
||||||
|
if logs == nil {
|
||||||
|
logs = []models.UsageLog{}
|
||||||
|
}
|
||||||
|
|
||||||
|
return logs, total, nil
|
||||||
|
}
|
||||||
90
backend/internal/router/router.go
Normal file
90
backend/internal/router/router.go
Normal file
@@ -0,0 +1,90 @@
|
|||||||
|
package router
|
||||||
|
|
||||||
|
import (
|
||||||
|
"net/http"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"github.com/CubeCraft-Creations/Extrudex/backend/internal/config"
|
||||||
|
"github.com/CubeCraft-Creations/Extrudex/backend/internal/handlers"
|
||||||
|
"github.com/CubeCraft-Creations/Extrudex/backend/internal/repositories"
|
||||||
|
"github.com/CubeCraft-Creations/Extrudex/backend/internal/services"
|
||||||
|
"github.com/CubeCraft-Creations/Extrudex/backend/internal/sse"
|
||||||
|
"github.com/go-chi/chi/v5"
|
||||||
|
"github.com/go-chi/chi/v5/middleware"
|
||||||
|
"github.com/jackc/pgx/v5/pgxpool"
|
||||||
|
)
|
||||||
|
|
||||||
|
// New creates and configures a Chi router with all middleware and handlers mounted.
|
||||||
|
func New(cfg *config.Config, dbPool *pgxpool.Pool, sseBC *sse.Broadcaster) chi.Router {
|
||||||
|
r := chi.NewRouter()
|
||||||
|
|
||||||
|
// Middleware
|
||||||
|
r.Use(middleware.RequestID)
|
||||||
|
r.Use(middleware.RealIP)
|
||||||
|
r.Use(middleware.Logger)
|
||||||
|
r.Use(middleware.Recoverer)
|
||||||
|
// Timeout middleware is applied per-route below to exclude SSE
|
||||||
|
|
||||||
|
// CORS
|
||||||
|
r.Use(func(next http.Handler) http.Handler {
|
||||||
|
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||||
|
w.Header().Set("Access-Control-Allow-Origin", cfg.CorsOrigin)
|
||||||
|
w.Header().Set("Access-Control-Allow-Methods", "GET, POST, PUT, DELETE, OPTIONS")
|
||||||
|
w.Header().Set("Access-Control-Allow-Headers", "Content-Type, Authorization")
|
||||||
|
if r.Method == http.MethodOptions {
|
||||||
|
w.WriteHeader(http.StatusNoContent)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
next.ServeHTTP(w, r)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
// Health check (with timeout)
|
||||||
|
healthHandler := handlers.NewHealthHandler(dbPool)
|
||||||
|
r.With(middleware.Timeout(30 * time.Second)).Get("/health", healthHandler.ServeHTTP)
|
||||||
|
|
||||||
|
// ── Repositories ──────────────────────────────────────────────────────
|
||||||
|
materialRepo := repositories.NewMaterialRepository(dbPool)
|
||||||
|
filamentRepo := repositories.NewFilamentRepository(dbPool)
|
||||||
|
printerRepo := repositories.NewPrinterRepository(dbPool)
|
||||||
|
printJobRepo := repositories.NewPrintJobRepository(dbPool)
|
||||||
|
usageLogRepo := repositories.NewUsageLogRepository(dbPool)
|
||||||
|
|
||||||
|
// ── Services ──────────────────────────────────────────────────────────
|
||||||
|
filamentService := services.NewFilamentService(filamentRepo)
|
||||||
|
printerService := services.NewPrinterService(printerRepo)
|
||||||
|
printJobService := services.NewPrintJobService(printJobRepo)
|
||||||
|
|
||||||
|
// ── Handlers ──────────────────────────────────────────────────────────
|
||||||
|
materialHandler := handlers.NewMaterialHandler(materialRepo)
|
||||||
|
filamentHandler := handlers.NewFilamentHandler(filamentService)
|
||||||
|
printerHandler := handlers.NewPrinterHandler(printerService)
|
||||||
|
printJobHandler := handlers.NewPrintJobHandler(printJobService)
|
||||||
|
usageLogHandler := handlers.NewUsageLogHandler(usageLogRepo)
|
||||||
|
|
||||||
|
// ── API Routes (with timeout) ─────────────────────────────────────────
|
||||||
|
r.Route("/api", func(r chi.Router) {
|
||||||
|
r.Use(middleware.Timeout(60 * time.Second))
|
||||||
|
r.Get("/materials", materialHandler.List)
|
||||||
|
|
||||||
|
r.Route("/filaments", func(r chi.Router) {
|
||||||
|
r.Get("/", filamentHandler.List)
|
||||||
|
r.Post("/", filamentHandler.Create)
|
||||||
|
r.Route("/{id}", func(r chi.Router) {
|
||||||
|
r.Get("/", filamentHandler.Get)
|
||||||
|
r.Put("/", filamentHandler.Update)
|
||||||
|
r.Delete("/", filamentHandler.Delete)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
r.Get("/printers", printerHandler.List)
|
||||||
|
r.Get("/print-jobs", printJobHandler.List)
|
||||||
|
r.Get("/usage-logs", usageLogHandler.List)
|
||||||
|
|
||||||
|
// SSE Events stream
|
||||||
|
sseHandler := sse.NewHandler(sseBC)
|
||||||
|
r.Get("/events", sseHandler.ServeHTTP)
|
||||||
|
})
|
||||||
|
|
||||||
|
return r
|
||||||
|
}
|
||||||
82
backend/internal/services/services.go
Normal file
82
backend/internal/services/services.go
Normal file
@@ -0,0 +1,82 @@
|
|||||||
|
// Package services contains business logic and application services.
|
||||||
|
package services
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
|
||||||
|
"github.com/CubeCraft-Creations/Extrudex/backend/internal/models"
|
||||||
|
"github.com/CubeCraft-Creations/Extrudex/backend/internal/repositories"
|
||||||
|
)
|
||||||
|
|
||||||
|
// FilamentService wraps FilamentRepository with business logic and validation.
type FilamentService struct {
	repo *repositories.FilamentRepository // data access; owned by the caller
}

// NewFilamentService creates a FilamentService backed by the given repository.
func NewFilamentService(repo *repositories.FilamentRepository) *FilamentService {
	return &FilamentService{repo: repo}
}

// List returns paginated filament spools filtered by the given criteria.
func (s *FilamentService) List(ctx context.Context, filter repositories.FilamentFilter) ([]models.FilamentSpool, int, error) {
	return s.repo.GetAll(ctx, filter)
}

// GetByID returns a single filament spool by ID.
func (s *FilamentService) GetByID(ctx context.Context, id int) (*models.FilamentSpool, error) {
	return s.repo.GetByID(ctx, id)
}

// Create validates and creates a new filament spool.
// Validation failures are returned before the repository is touched.
func (s *FilamentService) Create(ctx context.Context, spool *models.FilamentSpool) (*models.FilamentSpool, error) {
	if err := validateFilamentSpool(spool); err != nil {
		return nil, err
	}
	return s.repo.Create(ctx, spool)
}

// Update applies partial updates to a filament spool.
// NOTE(review): unlike Create, no validation happens here — callers are
// presumably expected to run ValidateUpdateFilamentRequest on the DTO first;
// confirm every caller does so.
func (s *FilamentService) Update(ctx context.Context, id int, updates map[string]interface{}) (*models.FilamentSpool, error) {
	return s.repo.Update(ctx, id, updates)
}

// SoftDelete marks a filament spool as deleted.
// The boolean presumably reports whether a matching spool existed —
// confirm against the repository implementation.
func (s *FilamentService) SoftDelete(ctx context.Context, id int) (bool, error) {
	return s.repo.SoftDelete(ctx, id)
}
|
||||||
|
|
||||||
|
// PrinterService wraps PrinterRepository.
// Currently a thin pass-through layer; business rules for printers can be
// added here without touching handlers or the repository.
type PrinterService struct {
	repo *repositories.PrinterRepository // data access; owned by the caller
}

// NewPrinterService creates a PrinterService backed by the given repository.
func NewPrinterService(repo *repositories.PrinterRepository) *PrinterService {
	return &PrinterService{repo: repo}
}

// List returns all printers.
func (s *PrinterService) List(ctx context.Context) ([]models.Printer, error) {
	return s.repo.GetAll(ctx)
}
|
||||||
|
|
||||||
|
// PrintJobService wraps PrintJobRepository.
// Currently a thin pass-through layer; job-related business rules can be
// added here without touching handlers or the repository.
type PrintJobService struct {
	repo *repositories.PrintJobRepository // data access; owned by the caller
}

// NewPrintJobService creates a PrintJobService backed by the given repository.
func NewPrintJobService(repo *repositories.PrintJobRepository) *PrintJobService {
	return &PrintJobService{repo: repo}
}

// List returns paginated print jobs filtered by the given criteria.
func (s *PrintJobService) List(ctx context.Context, filter repositories.PrintJobFilter) ([]models.PrintJob, int, error) {
	return s.repo.GetAll(ctx, filter)
}

// GetByID returns a single print job by ID.
func (s *PrintJobService) GetByID(ctx context.Context, id int) (*models.PrintJob, error) {
	return s.repo.GetByID(ctx, id)
}
|
||||||
74
backend/internal/services/validation.go
Normal file
74
backend/internal/services/validation.go
Normal file
@@ -0,0 +1,74 @@
|
|||||||
|
package services
|
||||||
|
|
||||||
|
import (
|
||||||
|
"errors"
|
||||||
|
"fmt"
|
||||||
|
"regexp"
|
||||||
|
|
||||||
|
"github.com/CubeCraft-Creations/Extrudex/backend/internal/dtos"
|
||||||
|
"github.com/CubeCraft-Creations/Extrudex/backend/internal/models"
|
||||||
|
)
|
||||||
|
|
||||||
|
// colorHexPattern validates hex color strings like #FF0000 or #ff0000.
// Exactly six hex digits after a leading '#'; three-digit shorthand
// (e.g. #F00) is rejected.
var colorHexPattern = regexp.MustCompile(`^#[0-9A-Fa-f]{6}$`)
|
||||||
|
|
||||||
|
// validateFilamentSpool performs validation on a FilamentSpool entity.
|
||||||
|
// Returns a descriptive error on failure.
|
||||||
|
func validateFilamentSpool(s *models.FilamentSpool) error {
|
||||||
|
if s.Name == "" {
|
||||||
|
return errors.New("name is required")
|
||||||
|
}
|
||||||
|
if s.MaterialBaseID <= 0 {
|
||||||
|
return errors.New("material_base_id is required")
|
||||||
|
}
|
||||||
|
if s.MaterialFinishID <= 0 {
|
||||||
|
return errors.New("material_finish_id is required")
|
||||||
|
}
|
||||||
|
if !colorHexPattern.MatchString(s.ColorHex) {
|
||||||
|
return fmt.Errorf("color_hex must be a valid hex color (e.g., #FF0000)")
|
||||||
|
}
|
||||||
|
if s.InitialGrams <= 0 {
|
||||||
|
return errors.New("initial_grams must be greater than 0")
|
||||||
|
}
|
||||||
|
if s.RemainingGrams < 0 {
|
||||||
|
return errors.New("remaining_grams must be >= 0")
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// ValidateCreateFilamentRequest validates a creation DTO.
|
||||||
|
func ValidateCreateFilamentRequest(req dtos.CreateFilamentRequest) error {
|
||||||
|
if req.Name == "" {
|
||||||
|
return errors.New("name is required")
|
||||||
|
}
|
||||||
|
if req.MaterialBaseID <= 0 {
|
||||||
|
return errors.New("material_base_id is required")
|
||||||
|
}
|
||||||
|
if req.MaterialFinishID <= 0 {
|
||||||
|
return errors.New("material_finish_id is required")
|
||||||
|
}
|
||||||
|
if !colorHexPattern.MatchString(req.ColorHex) {
|
||||||
|
return fmt.Errorf("color_hex must be a valid hex color (e.g., #FF0000)")
|
||||||
|
}
|
||||||
|
if req.InitialGrams <= 0 {
|
||||||
|
return errors.New("initial_grams must be greater than 0")
|
||||||
|
}
|
||||||
|
if req.RemainingGrams < 0 {
|
||||||
|
return errors.New("remaining_grams must be >= 0")
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// ValidateUpdateFilamentRequest validates partial update fields.
|
||||||
|
func ValidateUpdateFilamentRequest(req dtos.UpdateFilamentRequest) error {
|
||||||
|
if req.ColorHex != nil && !colorHexPattern.MatchString(*req.ColorHex) {
|
||||||
|
return fmt.Errorf("color_hex must be a valid hex color (e.g., #FF0000)")
|
||||||
|
}
|
||||||
|
if req.InitialGrams != nil && *req.InitialGrams <= 0 {
|
||||||
|
return errors.New("initial_grams must be greater than 0")
|
||||||
|
}
|
||||||
|
if req.RemainingGrams != nil && *req.RemainingGrams < 0 {
|
||||||
|
return errors.New("remaining_grams must be >= 0")
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
133
backend/internal/sse/broadcaster.go
Normal file
133
backend/internal/sse/broadcaster.go
Normal file
@@ -0,0 +1,133 @@
|
|||||||
|
package sse
|
||||||
|
|
||||||
|
import (
|
||||||
|
"log/slog"
|
||||||
|
"sync"
|
||||||
|
)
|
||||||
|
|
||||||
|
// client represents a single SSE subscriber — identified by its send channel.
type client struct {
	ch chan string // receives SSE-formatted message strings
}

// Broadcaster receives Events on its input channel and fans them out to every
// connected client. Subscribe adds a new client; Unsubscribe removes one.
// Start must be called before the broadcaster accepts events.
//
// The clients map is touched only by the single loop goroutine; the
// subscribe/unsubscribe channels serialize all access, so no mutex is used.
type Broadcaster struct {
	input       chan Event               // events published for fan-out
	subscribe   chan client              // new subscriber registrations
	unsubscribe chan client              // subscriber removal requests
	clients     map[chan string]struct{} // active subscriber channels (loop-owned)
	done        chan struct{}            // closed by Stop to request shutdown
	once        sync.Once                // guards the single close(done) in Stop
}
|
||||||
|
|
||||||
|
// NewBroadcaster creates a Broadcaster. bufSize controls the buffer depth for
|
||||||
|
// the input channel as well as for each per-client outbound channel.
|
||||||
|
func NewBroadcaster(bufSize int) *Broadcaster {
|
||||||
|
if bufSize <= 0 {
|
||||||
|
bufSize = 64
|
||||||
|
}
|
||||||
|
return &Broadcaster{
|
||||||
|
input: make(chan Event, bufSize),
|
||||||
|
subscribe: make(chan client),
|
||||||
|
unsubscribe: make(chan client),
|
||||||
|
clients: make(map[chan string]struct{}),
|
||||||
|
done: make(chan struct{}),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Publish pushes an event into the broadcaster. Safe for concurrent use.
// Once Stop has closed done, events are silently dropped so publishers
// never block during shutdown.
func (b *Broadcaster) Publish(ev Event) {
	select {
	case b.input <- ev:
	case <-b.done:
		// Silently drop during shutdown.
	}
}
|
||||||
|
|
||||||
|
// Start launches the broadcaster's fan-out loop in a goroutine.
// It must be called before Publish is used, and exactly once — each call
// unconditionally spawns another loop goroutine.
func (b *Broadcaster) Start() {
	go b.loop()
}
|
||||||
|
|
||||||
|
// Stop terminates the fan-out loop and closes all client channels.
// It is safe to call multiple times: sync.Once ensures done is closed
// exactly once; the loop goroutine performs the actual cleanup.
func (b *Broadcaster) Stop() {
	b.once.Do(func() {
		close(b.done)
	})
}
|
||||||
|
|
||||||
|
// Subscribe returns a new client channel that receives SSE-formatted strings.
|
||||||
|
func (b *Broadcaster) Subscribe() chan string {
|
||||||
|
c := client{ch: make(chan string, 64)}
|
||||||
|
select {
|
||||||
|
case b.subscribe <- c:
|
||||||
|
case <-b.done:
|
||||||
|
// Broadcaster already stopped — return a closed chan so the handler
|
||||||
|
// can bail out quickly.
|
||||||
|
ch := make(chan string)
|
||||||
|
close(ch)
|
||||||
|
return ch
|
||||||
|
}
|
||||||
|
return c.ch
|
||||||
|
}
|
||||||
|
|
||||||
|
// Unsubscribe removes a client channel and closes it.
// During shutdown the request is dropped; the loop's shutdown path closes
// every remaining client channel instead.
func (b *Broadcaster) Unsubscribe(ch chan string) {
	c := client{ch: ch}
	select {
	case b.unsubscribe <- c:
	case <-b.done:
		// Already shutting down — the loop closes remaining channels.
	}
}
|
||||||
|
|
||||||
|
// loop is the core fan-out goroutine.
|
||||||
|
func (b *Broadcaster) loop() {
|
||||||
|
for {
|
||||||
|
select {
|
||||||
|
case ev := <-b.input:
|
||||||
|
sse := ev.toSSE()
|
||||||
|
for ch := range b.clients {
|
||||||
|
// Non-blocking send — slow clients are dropped.
|
||||||
|
select {
|
||||||
|
case ch <- sse:
|
||||||
|
default:
|
||||||
|
slog.Warn("sse broadcaster: dropping event for slow client", "type", ev.Type)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
case c := <-b.subscribe:
|
||||||
|
b.clients[c.ch] = struct{}{}
|
||||||
|
slog.Debug("sse broadcaster: client connected", "total_clients", len(b.clients))
|
||||||
|
|
||||||
|
case c := <-b.unsubscribe:
|
||||||
|
if _, ok := b.clients[c.ch]; ok {
|
||||||
|
delete(b.clients, c.ch)
|
||||||
|
close(c.ch)
|
||||||
|
slog.Debug("sse broadcaster: client disconnected", "total_clients", len(b.clients))
|
||||||
|
}
|
||||||
|
|
||||||
|
case <-b.done:
|
||||||
|
// Drain remaining events in input before shutting down.
|
||||||
|
for ev := range b.input {
|
||||||
|
sse := ev.toSSE()
|
||||||
|
for ch := range b.clients {
|
||||||
|
select {
|
||||||
|
case ch <- sse:
|
||||||
|
default:
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
// Close all remaining client channels.
|
||||||
|
for ch := range b.clients {
|
||||||
|
close(ch)
|
||||||
|
}
|
||||||
|
b.clients = nil
|
||||||
|
return
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
92
backend/internal/sse/events.go
Normal file
92
backend/internal/sse/events.go
Normal file
@@ -0,0 +1,92 @@
|
|||||||
|
// Package sse provides Server-Sent Events infrastructure for real-time updates.
|
||||||
|
// Includes event types, a central broadcaster, and an HTTP handler.
|
||||||
|
package sse
|
||||||
|
|
||||||
|
import (
|
||||||
|
"encoding/json"
|
||||||
|
"time"
|
||||||
|
)
|
||||||
|
|
||||||
|
// EventType identifies the category of an SSE event.
// The string value is also used verbatim as the SSE "event:" field
// (see Event.toSSE).
type EventType string

// Known event types emitted by the backend.
const (
	EventPrinterStatus EventType = "printer.status" // printer state change
	EventJobStarted    EventType = "job.started"    // a print job began
	EventJobCompleted  EventType = "job.completed"  // a print job finished
	EventFilamentLow   EventType = "filament.low"   // spool below threshold
)
|
||||||
|
|
||||||
|
// Event is a JSON-serializable SSE event pushed through the broadcaster.
type Event struct {
	Type      EventType       `json:"type"`      // event category; also the SSE event name
	Payload   json.RawMessage `json:"payload"`   // pre-marshaled, type-specific payload
	Timestamp time.Time       `json:"timestamp"` // UTC creation time (set by NewEvent)
}
|
||||||
|
|
||||||
|
// PrinterStatusPayload carries printer online/offline/printing state.
type PrinterStatusPayload struct {
	PrinterID   int    `json:"printer_id"`
	PrinterName string `json:"printer_name"`
	Status      string `json:"status"` // "online", "offline", "printing"
}

// JobStartedPayload carries initial print job info.
type JobStartedPayload struct {
	JobID     int    `json:"job_id"`
	JobName   string `json:"job_name"`
	PrinterID int    `json:"printer_id"`
	SpoolID   *int   `json:"spool_id,omitempty"` // nil when no spool is assigned
}

// JobCompletedPayload carries final print job data including usage.
// Pointer fields are omitted from the JSON when unknown.
type JobCompletedPayload struct {
	JobID           int      `json:"job_id"`
	JobName         string   `json:"job_name"`
	PrinterID       int      `json:"printer_id"`
	DurationSeconds *int     `json:"duration_seconds,omitempty"`
	TotalGramsUsed  *float64 `json:"total_grams_used,omitempty"`
	TotalCostUSD    *float64 `json:"total_cost_usd,omitempty"`
}

// FilamentLowPayload alerts that a spool is below its threshold.
type FilamentLowPayload struct {
	SpoolID        int    `json:"spool_id"`
	SpoolName      string `json:"spool_name"`
	RemainingGrams int    `json:"remaining_grams"`
	ThresholdGrams int    `json:"threshold_grams"`
}
|
||||||
|
|
||||||
|
// NewEvent creates an Event with the current timestamp from a typed payload.
|
||||||
|
func NewEvent(eventType EventType, payload any) (Event, error) {
|
||||||
|
raw, err := json.Marshal(payload)
|
||||||
|
if err != nil {
|
||||||
|
return Event{}, err
|
||||||
|
}
|
||||||
|
return Event{
|
||||||
|
Type: eventType,
|
||||||
|
Payload: raw,
|
||||||
|
Timestamp: time.Now().UTC(),
|
||||||
|
}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// MustEvent creates an Event and panics on marshal failure (for use with
// known-good payloads in tests and internal wiring). The Must prefix
// follows the stdlib convention (e.g. regexp.MustCompile).
func MustEvent(eventType EventType, payload any) Event {
	ev, err := NewEvent(eventType, payload)
	if err != nil {
		panic("sse.MustEvent: failed to marshal payload: " + err.Error())
	}
	return ev
}
|
||||||
|
|
||||||
|
// toSSE formats this Event as a standard SSE message string ready to be
// written to a response writer. The format is:
//
//	event: <type>
//	data: <json>
//
// followed by the blank line that terminates an SSE message.
func (e Event) toSSE() string {
	// Marshal error deliberately ignored: Event's fields marshal cleanly
	// unless Payload holds invalid JSON (not the case for events built via
	// NewEvent/MustEvent); on failure data is empty and the frame degrades
	// to "data: ".
	data, _ := json.Marshal(e)
	return "event: " + string(e.Type) + "\n" + "data: " + string(data) + "\n\n"
}
|
||||||
59
backend/internal/sse/handler.go
Normal file
59
backend/internal/sse/handler.go
Normal file
@@ -0,0 +1,59 @@
|
|||||||
|
package sse
|
||||||
|
|
||||||
|
import (
|
||||||
|
"net/http"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Handler is the HTTP handler for the GET /api/events SSE stream.
// It registers a client with the broadcaster, streams events as they arrive,
// and unregisters on disconnect.
type Handler struct {
	bc *Broadcaster // shared fan-out hub supplying the events to stream
}

// NewHandler creates a Handler backed by the given Broadcaster.
func NewHandler(bc *Broadcaster) *Handler {
	return &Handler{bc: bc}
}
|
||||||
|
|
||||||
|
// ServeHTTP implements the SSE streaming endpoint.
|
||||||
|
// Flusher is required; clients that do not support flushing receive a 501.
|
||||||
|
func (h *Handler) ServeHTTP(w http.ResponseWriter, r *http.Request) {
|
||||||
|
flusher, ok := w.(http.Flusher)
|
||||||
|
if !ok {
|
||||||
|
http.Error(w, "streaming not supported", http.StatusNotImplemented)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// SSE-specific headers
|
||||||
|
w.Header().Set("Content-Type", "text/event-stream")
|
||||||
|
w.Header().Set("Cache-Control", "no-cache")
|
||||||
|
w.Header().Set("Connection", "keep-alive")
|
||||||
|
w.Header().Set("X-Accel-Buffering", "no") // Disable nginx buffering
|
||||||
|
|
||||||
|
// Write headers immediately
|
||||||
|
flusher.Flush()
|
||||||
|
|
||||||
|
// Subscribe to the broadcaster
|
||||||
|
ch := h.bc.Subscribe()
|
||||||
|
defer h.bc.Unsubscribe(ch)
|
||||||
|
|
||||||
|
// Use request context for cancellation when the client disconnects.
|
||||||
|
ctx := r.Context()
|
||||||
|
|
||||||
|
for {
|
||||||
|
select {
|
||||||
|
case <-ctx.Done():
|
||||||
|
return
|
||||||
|
case msg, ok := <-ch:
|
||||||
|
if !ok {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
_, err := w.Write([]byte(msg))
|
||||||
|
if err != nil {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
flusher.Flush()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
19
backend/migrations/000001_initial_schema.down.sql
Normal file
19
backend/migrations/000001_initial_schema.down.sql
Normal file
@@ -0,0 +1,19 @@
|
|||||||
|
-- Migration: 000001_initial_schema (rollback)
-- Description: Drop all tables and indexes created in the initial schema migration
-- Author: Hex
-- Date: 2026-05-06
--
-- Tables are dropped children-first (usage_logs -> print_jobs -> spools ->
-- printers -> lookups), so the CASCADE clauses are defensive rather than
-- load-bearing. IF EXISTS makes the rollback idempotent.

BEGIN;

DROP TABLE IF EXISTS usage_logs CASCADE;
DROP TABLE IF EXISTS print_jobs CASCADE;
DROP TABLE IF EXISTS filament_spools CASCADE;
DROP TABLE IF EXISTS printers CASCADE;
DROP TABLE IF EXISTS settings CASCADE;
DROP TABLE IF EXISTS material_modifiers CASCADE;
DROP TABLE IF EXISTS material_finishes CASCADE;
DROP TABLE IF EXISTS material_bases CASCADE;
DROP TABLE IF EXISTS job_statuses CASCADE;
DROP TABLE IF EXISTS printer_types CASCADE;

COMMIT;
|
||||||
231
backend/migrations/000001_initial_schema.up.sql
Normal file
231
backend/migrations/000001_initial_schema.up.sql
Normal file
@@ -0,0 +1,231 @@
|
|||||||
|
-- Migration: 000001_initial_schema
|
||||||
|
-- Description: Create initial Extrudex schema — lookup tables, core entities, and settings
|
||||||
|
-- Author: Hex
|
||||||
|
-- Date: 2026-05-06
|
||||||
|
--
|
||||||
|
-- Design decisions:
|
||||||
|
-- - Lookup tables for material_base, material_finish, material_modifier (no free-text enums)
|
||||||
|
-- - Lookup tables for printer_type and job_status (extensible, no hard-coded enum values)
|
||||||
|
-- - FK ON DELETE: RESTRICT on critical parents (material_base, material_finish, printer),
|
||||||
|
-- SET NULL on optional parents (modifier, spool on print_jobs),
|
||||||
|
-- CASCADE for usage_logs when parent job is deleted
|
||||||
|
-- - Soft-delete (deleted_at) on spools and print_jobs for safety
|
||||||
|
-- - JSONB config column on settings for flexible app-wide configuration
|
||||||
|
-- - All identifiers snake_case per project convention
|
||||||
|
|
||||||
|
BEGIN;
|
||||||
|
|
||||||
|
-- ============================================================================
|
||||||
|
-- Lookup Tables
|
||||||
|
-- ============================================================================
|
||||||
|
|
||||||
|
-- Printer types (fdm, resin, etc.) — extensible, not a raw enum
|
||||||
|
CREATE TABLE printer_types (
|
||||||
|
id SERIAL PRIMARY KEY,
|
||||||
|
name VARCHAR(50) NOT NULL UNIQUE,
|
||||||
|
created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
|
||||||
|
updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
|
||||||
|
);
|
||||||
|
|
||||||
|
-- Job statuses (pending, printing, paused, completed, failed, cancelled)
|
||||||
|
CREATE TABLE job_statuses (
|
||||||
|
id SERIAL PRIMARY KEY,
|
||||||
|
name VARCHAR(50) NOT NULL UNIQUE,
|
||||||
|
created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
|
||||||
|
updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
|
||||||
|
);
|
||||||
|
|
||||||
|
-- Material base types (PLA, PETG, ABS, TPU, ASA, Nylon, PC)
|
||||||
|
CREATE TABLE material_bases (
|
||||||
|
id SERIAL PRIMARY KEY,
|
||||||
|
name VARCHAR(100) NOT NULL UNIQUE,
|
||||||
|
density_g_cm3 DECIMAL(5,3) NOT NULL,
|
||||||
|
extrusion_temp_min INT,
|
||||||
|
extrusion_temp_max INT,
|
||||||
|
bed_temp_min INT,
|
||||||
|
bed_temp_max INT,
|
||||||
|
created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
|
||||||
|
updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
|
||||||
|
);
|
||||||
|
|
||||||
|
-- Material finishes (Basic, Silk, Matte, Glossy, Satin)
|
||||||
|
CREATE TABLE material_finishes (
|
||||||
|
id SERIAL PRIMARY KEY,
|
||||||
|
name VARCHAR(100) NOT NULL UNIQUE,
|
||||||
|
description TEXT,
|
||||||
|
created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
|
||||||
|
updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
|
||||||
|
);
|
||||||
|
|
||||||
|
-- Material modifiers (Wood-Filled, Carbon Fiber, Glow-in-Dark, Marble)
|
||||||
|
CREATE TABLE material_modifiers (
|
||||||
|
id SERIAL PRIMARY KEY,
|
||||||
|
name VARCHAR(100) NOT NULL UNIQUE,
|
||||||
|
description TEXT,
|
||||||
|
created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
|
||||||
|
updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
|
||||||
|
);
|
||||||
|
|
||||||
|
-- ============================================================================
|
||||||
|
-- Core Entity Tables
|
||||||
|
-- ============================================================================
|
||||||
|
|
||||||
|
-- 3D printers in the fleet
|
||||||
|
CREATE TABLE printers (
|
||||||
|
id SERIAL PRIMARY KEY,
|
||||||
|
name VARCHAR(255) NOT NULL,
|
||||||
|
printer_type_id INT NOT NULL,
|
||||||
|
manufacturer VARCHAR(255),
|
||||||
|
model VARCHAR(255),
|
||||||
|
moonraker_url VARCHAR(512),
|
||||||
|
moonraker_api_key VARCHAR(512),
|
||||||
|
mqtt_broker_host VARCHAR(255),
|
||||||
|
mqtt_topic_prefix VARCHAR(255),
|
||||||
|
mqtt_tls_enabled BOOLEAN NOT NULL DEFAULT FALSE,
|
||||||
|
is_active BOOLEAN NOT NULL DEFAULT TRUE,
|
||||||
|
created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
|
||||||
|
updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
|
||||||
|
|
||||||
|
CONSTRAINT fk_printers_printer_type
|
||||||
|
FOREIGN KEY (printer_type_id) REFERENCES printer_types(id)
|
||||||
|
ON DELETE RESTRICT
|
||||||
|
);
|
||||||
|
|
||||||
|
-- Filament spools — the core inventory item
|
||||||
|
CREATE TABLE filament_spools (
|
||||||
|
id SERIAL PRIMARY KEY,
|
||||||
|
name VARCHAR(255) NOT NULL,
|
||||||
|
material_base_id INT NOT NULL,
|
||||||
|
material_finish_id INT NOT NULL DEFAULT 1, -- "Basic" (seed data populates this first)
|
||||||
|
material_modifier_id INT,
|
||||||
|
color_hex VARCHAR(7) NOT NULL CHECK (color_hex ~ '^#[0-9A-Fa-f]{6}$'),
|
||||||
|
brand VARCHAR(255),
|
||||||
|
diameter_mm DECIMAL(4,2) NOT NULL DEFAULT 1.75,
|
||||||
|
initial_grams INT NOT NULL CHECK (initial_grams > 0),
|
||||||
|
remaining_grams INT NOT NULL CHECK (remaining_grams >= 0),
|
||||||
|
spool_weight_grams INT, -- measured empty-spool weight (tare), nullable
|
||||||
|
cost_usd DECIMAL(10,2),
|
||||||
|
low_stock_threshold_grams INT NOT NULL DEFAULT 50,
|
||||||
|
notes TEXT,
|
||||||
|
barcode VARCHAR(255) UNIQUE,
|
||||||
|
deleted_at TIMESTAMPTZ,
|
||||||
|
created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
|
||||||
|
updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
|
||||||
|
|
||||||
|
CONSTRAINT fk_spools_material_base
|
||||||
|
FOREIGN KEY (material_base_id) REFERENCES material_bases(id)
|
||||||
|
ON DELETE RESTRICT,
|
||||||
|
|
||||||
|
CONSTRAINT fk_spools_material_finish
|
||||||
|
FOREIGN KEY (material_finish_id) REFERENCES material_finishes(id)
|
||||||
|
ON DELETE RESTRICT,
|
||||||
|
|
||||||
|
CONSTRAINT fk_spools_material_modifier
|
||||||
|
FOREIGN KEY (material_modifier_id) REFERENCES material_modifiers(id)
|
||||||
|
ON DELETE SET NULL
|
||||||
|
);
|
||||||
|
|
||||||
|
-- Print jobs — each job is one print on one printer
|
||||||
|
CREATE TABLE print_jobs (
|
||||||
|
id SERIAL PRIMARY KEY,
|
||||||
|
printer_id INT NOT NULL,
|
||||||
|
filament_spool_id INT, -- nullable: a job may use multiple spools (captured in usage_logs)
|
||||||
|
job_name VARCHAR(255) NOT NULL,
|
||||||
|
file_name VARCHAR(512),
|
||||||
|
job_status_id INT NOT NULL DEFAULT 1, -- "pending"
|
||||||
|
started_at TIMESTAMPTZ,
|
||||||
|
completed_at TIMESTAMPTZ,
|
||||||
|
duration_seconds INT,
|
||||||
|
estimated_duration_seconds INT,
|
||||||
|
total_mm_extruded DECIMAL(12,2),
|
||||||
|
total_grams_used DECIMAL(10,2),
|
||||||
|
total_cost_usd DECIMAL(10,4),
|
||||||
|
notes TEXT,
|
||||||
|
deleted_at TIMESTAMPTZ,
|
||||||
|
created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
|
||||||
|
updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
|
||||||
|
|
||||||
|
CONSTRAINT fk_print_jobs_printer
|
||||||
|
FOREIGN KEY (printer_id) REFERENCES printers(id)
|
||||||
|
ON DELETE RESTRICT,
|
||||||
|
|
||||||
|
CONSTRAINT fk_print_jobs_spool
|
||||||
|
FOREIGN KEY (filament_spool_id) REFERENCES filament_spools(id)
|
||||||
|
ON DELETE SET NULL,
|
||||||
|
|
||||||
|
CONSTRAINT fk_print_jobs_status
|
||||||
|
FOREIGN KEY (job_status_id) REFERENCES job_statuses(id)
|
||||||
|
ON DELETE RESTRICT
|
||||||
|
);
|
||||||
|
|
||||||
|
-- Usage logs — granular tracking of filament consumed per job, per spool
|
||||||
|
CREATE TABLE usage_logs (
|
||||||
|
id SERIAL PRIMARY KEY,
|
||||||
|
print_job_id INT NOT NULL,
|
||||||
|
filament_spool_id INT NOT NULL,
|
||||||
|
mm_extruded DECIMAL(12,2) NOT NULL CHECK (mm_extruded > 0),
|
||||||
|
grams_used DECIMAL(10,2) NOT NULL CHECK (grams_used > 0),
|
||||||
|
cost_usd DECIMAL(10,4),
|
||||||
|
logged_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
|
||||||
|
created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
|
||||||
|
|
||||||
|
CONSTRAINT fk_usage_logs_print_job
|
||||||
|
FOREIGN KEY (print_job_id) REFERENCES print_jobs(id)
|
||||||
|
ON DELETE CASCADE,
|
||||||
|
|
||||||
|
CONSTRAINT fk_usage_logs_spool
|
||||||
|
FOREIGN KEY (filament_spool_id) REFERENCES filament_spools(id)
|
||||||
|
ON DELETE RESTRICT
|
||||||
|
);
|
||||||
|
|
||||||
|
-- ============================================================================
|
||||||
|
-- Application Settings
|
||||||
|
-- ============================================================================
|
||||||
|
|
||||||
|
CREATE TABLE settings (
|
||||||
|
id SERIAL PRIMARY KEY,
|
||||||
|
key VARCHAR(255) NOT NULL UNIQUE,
|
||||||
|
value JSONB NOT NULL,
|
||||||
|
description TEXT,
|
||||||
|
created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
|
||||||
|
updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
|
||||||
|
);
|
||||||
|
|
||||||
|
-- ============================================================================
|
||||||
|
-- Indexes
|
||||||
|
-- ============================================================================
|
||||||
|
|
||||||
|
-- Filament spools — query patterns: lookup by material, low-stock scans, barcode scans
|
||||||
|
CREATE INDEX ix_spools_material_base_id ON filament_spools(material_base_id);
|
||||||
|
CREATE INDEX ix_spools_material_finish_id ON filament_spools(material_finish_id);
|
||||||
|
CREATE INDEX ix_spools_material_modifier_id ON filament_spools(material_modifier_id);
|
||||||
|
CREATE INDEX ix_spools_remaining_grams ON filament_spools(remaining_grams)
|
||||||
|
WHERE deleted_at IS NULL; -- partial index: only active spools for low-stock queries
|
||||||
|
CREATE INDEX ix_spools_barcode ON filament_spools(barcode)
|
||||||
|
WHERE barcode IS NOT NULL AND deleted_at IS NULL;
|
||||||
|
CREATE INDEX ix_spools_deleted_at ON filament_spools(deleted_at)
|
||||||
|
WHERE deleted_at IS NOT NULL; -- small index for soft-delete filtering
|
||||||
|
|
||||||
|
-- Printers
|
||||||
|
CREATE INDEX ix_printers_printer_type_id ON printers(printer_type_id);
|
||||||
|
CREATE INDEX ix_printers_is_active ON printers(is_active)
|
||||||
|
WHERE is_active = TRUE; -- partial index for fleet dashboard queries
|
||||||
|
|
||||||
|
-- Print jobs — query by printer, status, date range, and soft-delete filter
|
||||||
|
CREATE INDEX ix_print_jobs_printer_id ON print_jobs(printer_id);
|
||||||
|
CREATE INDEX ix_print_jobs_spool_id ON print_jobs(filament_spool_id)
|
||||||
|
WHERE filament_spool_id IS NOT NULL;
|
||||||
|
CREATE INDEX ix_print_jobs_status_id ON print_jobs(job_status_id);
|
||||||
|
CREATE INDEX ix_print_jobs_created_at ON print_jobs(created_at DESC);
|
||||||
|
CREATE INDEX ix_print_jobs_deleted_at ON print_jobs(deleted_at)
|
||||||
|
WHERE deleted_at IS NOT NULL;
|
||||||
|
|
||||||
|
-- Usage logs — always queried by job or spool
|
||||||
|
CREATE INDEX ix_usage_logs_print_job_id ON usage_logs(print_job_id);
|
||||||
|
CREATE INDEX ix_usage_logs_spool_id ON usage_logs(filament_spool_id);
|
||||||
|
CREATE INDEX ix_usage_logs_logged_at ON usage_logs(logged_at DESC);
|
||||||
|
|
||||||
|
-- Settings — key lookups
|
||||||
|
CREATE INDEX ix_settings_key ON settings(key);
|
||||||
|
|
||||||
|
COMMIT;
|
||||||
15
backend/migrations/000002_seed_data.down.sql
Normal file
15
backend/migrations/000002_seed_data.down.sql
Normal file
@@ -0,0 +1,15 @@
|
|||||||
|
-- Migration: 000002_seed_data (rollback)
|
||||||
|
-- Description: Remove seed data inserted in 000002
|
||||||
|
-- Author: Hex
|
||||||
|
-- Date: 2026-05-06
|
||||||
|
|
||||||
|
BEGIN;
|
||||||
|
|
||||||
|
DELETE FROM settings WHERE key IN ('default_low_stock_threshold_grams', 'default_diameter_mm', 'filament_cross_section_area_mm2');
|
||||||
|
DELETE FROM material_modifiers WHERE id IN (1, 2, 3, 4);
|
||||||
|
DELETE FROM material_finishes WHERE id IN (1, 2, 3, 4, 5);
|
||||||
|
DELETE FROM material_bases WHERE id IN (1, 2, 3, 4, 5, 6, 7);
|
||||||
|
DELETE FROM job_statuses WHERE id IN (1, 2, 3, 4, 5, 6);
|
||||||
|
DELETE FROM printer_types WHERE id IN (1, 2);
|
||||||
|
|
||||||
|
COMMIT;
|
||||||
95
backend/migrations/000002_seed_data.up.sql
Normal file
95
backend/migrations/000002_seed_data.up.sql
Normal file
@@ -0,0 +1,95 @@
|
|||||||
|
-- Seed Data: Extrudex common reference data
|
||||||
|
-- Author: Hex
|
||||||
|
-- Date: 2026-05-06
|
||||||
|
--
|
||||||
|
-- IMPORTANT: IDs are explicitly assigned to satisfy the DEFAULT constraints:
|
||||||
|
-- - filament_spools.material_finish_id DEFAULT 1 ("Basic")
|
||||||
|
-- - print_jobs.job_status_id DEFAULT 1 ("pending")
|
||||||
|
--
|
||||||
|
-- Density values sourced from common manufacturer specifications.
|
||||||
|
-- Temperature ranges are conservative/typical; users can override per-spool.
|
||||||
|
|
||||||
|
BEGIN;
|
||||||
|
|
||||||
|
-- ============================================================================
|
||||||
|
-- Printer Types
|
||||||
|
-- ============================================================================
|
||||||
|
|
||||||
|
INSERT INTO printer_types (id, name) VALUES
|
||||||
|
(1, 'fdm'),
|
||||||
|
(2, 'resin')
|
||||||
|
ON CONFLICT (id) DO NOTHING;
|
||||||
|
|
||||||
|
-- Reset the sequence so future inserts start after our explicit IDs
|
||||||
|
SELECT setval('printer_types_id_seq', GREATEST(2, (SELECT MAX(id) FROM printer_types)));
|
||||||
|
|
||||||
|
-- ============================================================================
|
||||||
|
-- Job Statuses
|
||||||
|
-- ============================================================================
|
||||||
|
|
||||||
|
INSERT INTO job_statuses (id, name) VALUES
|
||||||
|
(1, 'pending'),
|
||||||
|
(2, 'printing'),
|
||||||
|
(3, 'paused'),
|
||||||
|
(4, 'completed'),
|
||||||
|
(5, 'failed'),
|
||||||
|
(6, 'cancelled')
|
||||||
|
ON CONFLICT (id) DO NOTHING;
|
||||||
|
|
||||||
|
SELECT setval('job_statuses_id_seq', GREATEST(6, (SELECT MAX(id) FROM job_statuses)));
|
||||||
|
|
||||||
|
-- ============================================================================
|
||||||
|
-- Material Bases (common filament types)
|
||||||
|
-- ============================================================================
|
||||||
|
|
||||||
|
INSERT INTO material_bases (id, name, density_g_cm3, extrusion_temp_min, extrusion_temp_max, bed_temp_min, bed_temp_max) VALUES
|
||||||
|
(1, 'PLA', 1.24, 190, 220, 0, 60),
|
||||||
|
(2, 'PETG', 1.27, 230, 250, 70, 90),
|
||||||
|
(3, 'ABS', 1.04, 230, 260, 90, 110),
|
||||||
|
(4, 'TPU', 1.21, 220, 250, 0, 60),
|
||||||
|
(5, 'ASA', 1.07, 240, 260, 90, 110),
|
||||||
|
(6, 'Nylon', 1.14, 240, 280, 70, 100),
|
||||||
|
(7, 'PC', 1.20, 260, 310, 90, 120)
|
||||||
|
ON CONFLICT (id) DO NOTHING;
|
||||||
|
|
||||||
|
SELECT setval('material_bases_id_seq', GREATEST(7, (SELECT MAX(id) FROM material_bases)));
|
||||||
|
|
||||||
|
-- ============================================================================
|
||||||
|
-- Material Finishes
|
||||||
|
-- ============================================================================
|
||||||
|
-- ID 1 = "Basic" is the default for new spools (DEFAULT 1 constraint)
|
||||||
|
|
||||||
|
INSERT INTO material_finishes (id, name, description) VALUES
|
||||||
|
(1, 'Basic', 'Standard solid-color filament with no special finish'),
|
||||||
|
(2, 'Silk', 'Glossy silk-like sheen, often used for decorative prints'),
|
||||||
|
(3, 'Matte', 'Flat non-reflective surface finish'),
|
||||||
|
(4, 'Glossy', 'High-shine reflective surface'),
|
||||||
|
(5, 'Satin', 'Semi-gloss between matte and glossy')
|
||||||
|
ON CONFLICT (id) DO NOTHING;
|
||||||
|
|
||||||
|
SELECT setval('material_finishes_id_seq', GREATEST(5, (SELECT MAX(id) FROM material_finishes)));
|
||||||
|
|
||||||
|
-- ============================================================================
|
||||||
|
-- Material Modifiers
|
||||||
|
-- ============================================================================
|
||||||
|
|
||||||
|
INSERT INTO material_modifiers (id, name, description) VALUES
|
||||||
|
(1, 'Wood-Filled', 'Contains wood fibers for natural wood-like appearance and texture'),
|
||||||
|
(2, 'Carbon Fiber', 'Reinforced with carbon fibers for increased stiffness and strength'),
|
||||||
|
(3, 'Glow-in-Dark', 'Phosphorescent additive that glows after exposure to light'),
|
||||||
|
(4, 'Marble', 'Contains specks for a stone-like marble appearance')
|
||||||
|
ON CONFLICT (id) DO NOTHING;
|
||||||
|
|
||||||
|
SELECT setval('material_modifiers_id_seq', GREATEST(4, (SELECT MAX(id) FROM material_modifiers)));
|
||||||
|
|
||||||
|
-- ============================================================================
|
||||||
|
-- Default Application Settings
|
||||||
|
-- ============================================================================
|
||||||
|
|
||||||
|
INSERT INTO settings (key, value, description) VALUES
|
||||||
|
('default_low_stock_threshold_grams', '50', 'Default grams threshold for low-stock alerts on new spools'),
|
||||||
|
('default_diameter_mm', '1.75', 'Default filament diameter for new spools (1.75mm is the modern standard)'),
|
||||||
|
('filament_cross_section_area_mm2', '2.405', 'Cross-sectional area for 1.75mm filament: π × (1.75/2)²')
|
||||||
|
ON CONFLICT (key) DO NOTHING;
|
||||||
|
|
||||||
|
COMMIT;
|
||||||
44
frontend/.gitignore
vendored
44
frontend/.gitignore
vendored
@@ -1,44 +0,0 @@
|
|||||||
# See https://docs.github.com/get-started/getting-started-with-git/ignoring-files for more about ignoring files.
|
|
||||||
|
|
||||||
# Compiled output
|
|
||||||
/dist
|
|
||||||
/tmp
|
|
||||||
/out-tsc
|
|
||||||
/bazel-out
|
|
||||||
|
|
||||||
# Node
|
|
||||||
/node_modules
|
|
||||||
npm-debug.log
|
|
||||||
yarn-error.log
|
|
||||||
|
|
||||||
# IDEs and editors
|
|
||||||
.idea/
|
|
||||||
.project
|
|
||||||
.classpath
|
|
||||||
.c9/
|
|
||||||
*.launch
|
|
||||||
.settings/
|
|
||||||
*.sublime-workspace
|
|
||||||
|
|
||||||
# Visual Studio Code
|
|
||||||
.vscode/*
|
|
||||||
!.vscode/settings.json
|
|
||||||
!.vscode/tasks.json
|
|
||||||
!.vscode/launch.json
|
|
||||||
!.vscode/extensions.json
|
|
||||||
!.vscode/mcp.json
|
|
||||||
.history/*
|
|
||||||
|
|
||||||
# Miscellaneous
|
|
||||||
/.angular/cache
|
|
||||||
.sass-cache/
|
|
||||||
/connect.lock
|
|
||||||
/coverage
|
|
||||||
/libpeerconnection.log
|
|
||||||
testem.log
|
|
||||||
/typings
|
|
||||||
__screenshots__/
|
|
||||||
|
|
||||||
# System files
|
|
||||||
.DS_Store
|
|
||||||
Thumbs.db
|
|
||||||
@@ -1,20 +1,14 @@
|
|||||||
# Multi-stage build for production
|
# Build stage
|
||||||
FROM node:22-alpine AS builder
|
FROM node:22-alpine AS builder
|
||||||
|
|
||||||
WORKDIR /app
|
WORKDIR /app
|
||||||
|
|
||||||
COPY package*.json ./
|
COPY package*.json ./
|
||||||
RUN npm ci
|
RUN npm ci
|
||||||
|
|
||||||
COPY . .
|
COPY . .
|
||||||
RUN npm run build
|
RUN npm run build
|
||||||
|
|
||||||
# Production stage — serve with nginx
|
# Serve stage
|
||||||
FROM nginx:alpine
|
FROM nginx:alpine
|
||||||
|
|
||||||
COPY --from=builder /app/dist /usr/share/nginx/html
|
COPY --from=builder /app/dist /usr/share/nginx/html
|
||||||
COPY nginx.conf /etc/nginx/conf.d/default.conf
|
COPY nginx.conf /etc/nginx/conf.d/default.conf
|
||||||
|
|
||||||
EXPOSE 80
|
EXPOSE 80
|
||||||
|
|
||||||
CMD ["nginx", "-g", "daemon off;"]
|
CMD ["nginx", "-g", "daemon off;"]
|
||||||
|
|||||||
@@ -10,7 +10,7 @@ export default tseslint.config(
|
|||||||
extends: [js.configs.recommended, ...tseslint.configs.recommended],
|
extends: [js.configs.recommended, ...tseslint.configs.recommended],
|
||||||
files: ['**/*.{ts,tsx}'],
|
files: ['**/*.{ts,tsx}'],
|
||||||
languageOptions: {
|
languageOptions: {
|
||||||
ecmaVersion: 2023,
|
ecmaVersion: 2020,
|
||||||
globals: globals.browser,
|
globals: globals.browser,
|
||||||
},
|
},
|
||||||
plugins: {
|
plugins: {
|
||||||
|
|||||||
@@ -4,7 +4,6 @@
|
|||||||
<meta charset="UTF-8" />
|
<meta charset="UTF-8" />
|
||||||
<link rel="icon" type="image/svg+xml" href="/vite.svg" />
|
<link rel="icon" type="image/svg+xml" href="/vite.svg" />
|
||||||
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
|
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
|
||||||
<meta name="theme-color" content="#0f172a" />
|
|
||||||
<title>Extrudex</title>
|
<title>Extrudex</title>
|
||||||
</head>
|
</head>
|
||||||
<body>
|
<body>
|
||||||
|
|||||||
@@ -1,16 +1,23 @@
|
|||||||
server {
|
server {
|
||||||
listen 80;
|
listen 80;
|
||||||
listen [::]:80;
|
server_name _;
|
||||||
server_name localhost;
|
|
||||||
|
root /usr/share/nginx/html;
|
||||||
|
index index.html;
|
||||||
|
|
||||||
location / {
|
location / {
|
||||||
root /usr/share/nginx/html;
|
|
||||||
index index.html;
|
|
||||||
try_files $uri $uri/ /index.html;
|
try_files $uri $uri/ /index.html;
|
||||||
}
|
}
|
||||||
|
|
||||||
error_page 500 502 503 504 /50x.html;
|
location /api/ {
|
||||||
location = /50x.html {
|
proxy_pass http://backend:8080/api/;
|
||||||
root /usr/share/nginx/html;
|
proxy_http_version 1.1;
|
||||||
|
proxy_set_header Upgrade $http_upgrade;
|
||||||
|
proxy_set_header Connection 'upgrade';
|
||||||
|
proxy_set_header Host $host;
|
||||||
|
proxy_cache_bypass $http_upgrade;
|
||||||
|
proxy_set_header X-Real-IP $remote_addr;
|
||||||
|
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||||
|
proxy_set_header X-Forwarded-Proto $scheme;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
2120
frontend/package-lock.json
generated
2120
frontend/package-lock.json
generated
File diff suppressed because it is too large
Load Diff
@@ -1,36 +1,35 @@
|
|||||||
{
|
{
|
||||||
"name": "extrudex-frontend",
|
"name": "extrudex-frontend",
|
||||||
"private": true,
|
"private": true,
|
||||||
"version": "0.0.0",
|
"version": "0.0.1",
|
||||||
"type": "module",
|
"type": "module",
|
||||||
"scripts": {
|
"scripts": {
|
||||||
"dev": "vite",
|
"dev": "vite",
|
||||||
"build": "tsc -b && vite build",
|
"build": "tsc && vite build",
|
||||||
"lint": "eslint .",
|
"lint": "eslint . --ext ts,tsx --report-unused-disable-directives --max-warnings 0",
|
||||||
"preview": "vite preview"
|
"preview": "vite preview"
|
||||||
},
|
},
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"@tailwindcss/vite": "^4.2.4",
|
"@tanstack/react-query": "^5.60.0",
|
||||||
"@tanstack/react-query": "^5.100.9",
|
"axios": "^1.7.0",
|
||||||
"axios": "^1.16.0",
|
"lucide-react": "^0.460.0",
|
||||||
"react": "^19.2.5",
|
"react": "^19.0.0",
|
||||||
"react-dom": "^19.2.5",
|
"react-dom": "^19.0.0",
|
||||||
"react-router-dom": "^7.15.0",
|
"react-router-dom": "^7.0.0"
|
||||||
"tailwindcss": "^4.2.4",
|
|
||||||
"zustand": "^5.0.13"
|
|
||||||
},
|
},
|
||||||
"devDependencies": {
|
"devDependencies": {
|
||||||
"@eslint/js": "^10.0.1",
|
"@tailwindcss/postcss": "^4.2.4",
|
||||||
"@types/react": "^19.2.14",
|
"@tailwindcss/vite": "^4.2.4",
|
||||||
"@types/react-dom": "^19.2.3",
|
"@types/react": "^19.0.0",
|
||||||
"@types/react-router-dom": "^5.3.3",
|
"@types/react-dom": "^19.0.0",
|
||||||
"@vitejs/plugin-react": "^6.0.1",
|
"@vitejs/plugin-react": "^4.3.0",
|
||||||
"eslint": "^10.2.1",
|
"autoprefixer": "^10.4.20",
|
||||||
"eslint-plugin-react-hooks": "^7.1.1",
|
"eslint": "^9.15.0",
|
||||||
"eslint-plugin-react-refresh": "^0.5.2",
|
"eslint-plugin-react-hooks": "^5.0.0",
|
||||||
"globals": "^17.5.0",
|
"eslint-plugin-react-refresh": "^0.4.14",
|
||||||
"typescript": "~6.0.2",
|
"postcss": "^8.4.49",
|
||||||
"typescript-eslint": "^8.58.2",
|
"tailwindcss": "^4.0.0",
|
||||||
"vite": "^8.0.10"
|
"typescript": "~5.6.0",
|
||||||
|
"vite": "^6.0.0"
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
5
frontend/postcss.config.js
Normal file
5
frontend/postcss.config.js
Normal file
@@ -0,0 +1,5 @@
|
|||||||
|
export default {
|
||||||
|
plugins: {
|
||||||
|
'@tailwindcss/postcss': {},
|
||||||
|
},
|
||||||
|
}
|
||||||
Binary file not shown.
|
Before Width: | Height: | Size: 15 KiB |
@@ -1,17 +1,25 @@
|
|||||||
import { Routes, Route } from 'react-router-dom'
|
import { QueryClient, QueryClientProvider } from '@tanstack/react-query'
|
||||||
import ErrorBoundary from './components/ErrorBoundary'
|
import { BrowserRouter, Routes, Route } from 'react-router-dom'
|
||||||
import HomePage from './pages/HomePage'
|
import InventoryPage from './pages/InventoryPage'
|
||||||
|
|
||||||
function App() {
|
const queryClient = new QueryClient()
|
||||||
|
|
||||||
|
export default function App() {
|
||||||
return (
|
return (
|
||||||
<ErrorBoundary>
|
<QueryClientProvider client={queryClient}>
|
||||||
<div className="min-h-screen bg-slate-900 text-slate-100">
|
<BrowserRouter>
|
||||||
<Routes>
|
<div className="min-h-screen bg-slate-900 text-slate-50">
|
||||||
<Route path="/" element={<HomePage />} />
|
<header className="bg-slate-800 border-b border-slate-700 px-4 py-3 flex items-center gap-3 sticky top-0 z-20">
|
||||||
</Routes>
|
<div className="w-8 h-8 rounded bg-emerald-500 flex items-center justify-center text-slate-900 font-bold text-lg">E</div>
|
||||||
</div>
|
<h1 className="text-lg font-semibold">Extrudex</h1>
|
||||||
</ErrorBoundary>
|
</header>
|
||||||
|
<main className="p-4">
|
||||||
|
<Routes>
|
||||||
|
<Route path="/" element={<InventoryPage />} />
|
||||||
|
</Routes>
|
||||||
|
</main>
|
||||||
|
</div>
|
||||||
|
</BrowserRouter>
|
||||||
|
</QueryClientProvider>
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
export default App
|
|
||||||
|
|||||||
18
frontend/src/components/ColorSwatch.tsx
Normal file
18
frontend/src/components/ColorSwatch.tsx
Normal file
@@ -0,0 +1,18 @@
|
|||||||
|
interface ColorSwatchProps {
|
||||||
|
colorHex: string
|
||||||
|
size?: number
|
||||||
|
}
|
||||||
|
|
||||||
|
export default function ColorSwatch({ colorHex, size = 24 }: ColorSwatchProps) {
|
||||||
|
return (
|
||||||
|
<div
|
||||||
|
className="rounded-full border border-slate-600 shadow-sm inline-block"
|
||||||
|
style={{
|
||||||
|
backgroundColor: colorHex.startsWith('#') ? colorHex : `#${colorHex}`,
|
||||||
|
width: size,
|
||||||
|
height: size,
|
||||||
|
}}
|
||||||
|
title={colorHex}
|
||||||
|
/>
|
||||||
|
)
|
||||||
|
}
|
||||||
@@ -1,50 +0,0 @@
|
|||||||
import { Component, type ReactNode } from 'react'
|
|
||||||
|
|
||||||
interface Props {
|
|
||||||
children: ReactNode
|
|
||||||
}
|
|
||||||
|
|
||||||
interface State {
|
|
||||||
hasError: boolean
|
|
||||||
error?: Error
|
|
||||||
}
|
|
||||||
|
|
||||||
class ErrorBoundary extends Component<Props, State> {
|
|
||||||
constructor(props: Props) {
|
|
||||||
super(props)
|
|
||||||
this.state = { hasError: false }
|
|
||||||
}
|
|
||||||
|
|
||||||
static getDerivedStateFromError(error: Error): State {
|
|
||||||
return { hasError: true, error }
|
|
||||||
}
|
|
||||||
|
|
||||||
componentDidCatch(error: Error, info: React.ErrorInfo) {
|
|
||||||
// eslint-disable-next-line no-console
|
|
||||||
console.error('ErrorBoundary caught:', error, info)
|
|
||||||
}
|
|
||||||
|
|
||||||
render() {
|
|
||||||
if (this.state.hasError) {
|
|
||||||
return (
|
|
||||||
<div className="flex min-h-screen items-center justify-center p-4">
|
|
||||||
<div className="rounded-xl border border-red-500/30 bg-red-950/40 p-6 text-center shadow-lg backdrop-blur-sm">
|
|
||||||
<h2 className="mb-2 text-xl font-semibold text-red-400">Something went wrong</h2>
|
|
||||||
<p className="mb-4 text-sm text-red-300">
|
|
||||||
{this.state.error?.message || 'An unexpected error occurred.'}
|
|
||||||
</p>
|
|
||||||
<button
|
|
||||||
onClick={() => window.location.reload()}
|
|
||||||
className="rounded-lg bg-red-600 px-4 py-2 text-sm font-medium text-white hover:bg-red-700"
|
|
||||||
>
|
|
||||||
Reload Page
|
|
||||||
</button>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
)
|
|
||||||
}
|
|
||||||
return this.props.children
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
export default ErrorBoundary
|
|
||||||
@@ -1,21 +0,0 @@
|
|||||||
export default function ErrorState({
|
|
||||||
message = 'Something went wrong.',
|
|
||||||
onRetry,
|
|
||||||
}: {
|
|
||||||
message?: string
|
|
||||||
onRetry?: () => void
|
|
||||||
}) {
|
|
||||||
return (
|
|
||||||
<div className="flex min-h-[120px] flex-col items-center justify-center gap-3 rounded-xl border border-red-500/20 bg-red-950/30 p-6 text-center">
|
|
||||||
<p className="text-sm text-red-300">{message}</p>
|
|
||||||
{onRetry && (
|
|
||||||
<button
|
|
||||||
onClick={onRetry}
|
|
||||||
className="rounded-lg bg-red-600 px-3 py-1.5 text-xs font-medium text-white hover:bg-red-700"
|
|
||||||
>
|
|
||||||
Retry
|
|
||||||
</button>
|
|
||||||
)}
|
|
||||||
</div>
|
|
||||||
)
|
|
||||||
}
|
|
||||||
@@ -1,14 +0,0 @@
|
|||||||
export default function LoadingSpinner({ size = 'md' }: { size?: 'sm' | 'md' | 'lg' }) {
|
|
||||||
const sizeClass =
|
|
||||||
size === 'sm' ? 'h-4 w-4 border-2' : size === 'lg' ? 'h-10 w-10 border-4' : 'h-6 w-6 border-2'
|
|
||||||
|
|
||||||
return (
|
|
||||||
<div className="flex items-center justify-center p-4">
|
|
||||||
<div
|
|
||||||
className={`${sizeClass} animate-spin rounded-full border-slate-600 border-t-sky-400`}
|
|
||||||
role="status"
|
|
||||||
aria-label="Loading"
|
|
||||||
/>
|
|
||||||
</div>
|
|
||||||
)
|
|
||||||
}
|
|
||||||
0
frontend/src/hooks/.gitkeep
Normal file
0
frontend/src/hooks/.gitkeep
Normal file
@@ -1,11 +0,0 @@
|
|||||||
import { useQuery } from '@tanstack/react-query'
|
|
||||||
import { healthCheck } from '../services/api'
|
|
||||||
|
|
||||||
export function useHealth() {
|
|
||||||
return useQuery({
|
|
||||||
queryKey: ['health'],
|
|
||||||
queryFn: healthCheck,
|
|
||||||
retry: 2,
|
|
||||||
refetchInterval: 30000,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
@@ -2,9 +2,7 @@
|
|||||||
|
|
||||||
body {
|
body {
|
||||||
margin: 0;
|
margin: 0;
|
||||||
min-width: 320px;
|
|
||||||
min-height: 100vh;
|
min-height: 100vh;
|
||||||
background-color: #0f172a;
|
background-color: #0f172a; /* slate-900 */
|
||||||
color: #e2e8f0;
|
color: #f8fafc; /* slate-50 */
|
||||||
font-family: ui-sans-serif, system-ui, -apple-system, BlinkMacSystemFont, 'Segoe UI', Roboto, 'Helvetica Neue', Arial, sans-serif;
|
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,18 +1,10 @@
|
|||||||
import { StrictMode } from 'react'
|
import { StrictMode } from 'react'
|
||||||
import { createRoot } from 'react-dom/client'
|
import { createRoot } from 'react-dom/client'
|
||||||
import { QueryClient, QueryClientProvider } from '@tanstack/react-query'
|
|
||||||
import { BrowserRouter } from 'react-router-dom'
|
|
||||||
import './index.css'
|
import './index.css'
|
||||||
import App from './App.tsx'
|
import App from './App'
|
||||||
|
|
||||||
const queryClient = new QueryClient()
|
|
||||||
|
|
||||||
createRoot(document.getElementById('root')!).render(
|
createRoot(document.getElementById('root')!).render(
|
||||||
<StrictMode>
|
<StrictMode>
|
||||||
<QueryClientProvider client={queryClient}>
|
<App />
|
||||||
<BrowserRouter>
|
|
||||||
<App />
|
|
||||||
</BrowserRouter>
|
|
||||||
</QueryClientProvider>
|
|
||||||
</StrictMode>,
|
</StrictMode>,
|
||||||
)
|
)
|
||||||
|
|||||||
@@ -1,36 +0,0 @@
|
|||||||
import LoadingSpinner from '../components/LoadingSpinner'
|
|
||||||
import ErrorState from '../components/ErrorState'
|
|
||||||
import { useHealth } from '../hooks/useHealth'
|
|
||||||
|
|
||||||
export default function HomePage() {
|
|
||||||
const { data, isLoading, isError, refetch } = useHealth()
|
|
||||||
|
|
||||||
return (
|
|
||||||
<div className="flex min-h-screen flex-col items-center justify-center gap-6 p-6">
|
|
||||||
<h1 className="text-3xl font-bold tracking-tight text-sky-400">Extrudex</h1>
|
|
||||||
<p className="text-slate-400">Filament inventory & print tracking</p>
|
|
||||||
|
|
||||||
<div className="w-full max-w-md rounded-xl border border-slate-700 bg-slate-800/60 p-6 shadow-lg backdrop-blur-sm">
|
|
||||||
<h2 className="mb-3 text-sm font-semibold uppercase tracking-wider text-slate-400">
|
|
||||||
Backend Health
|
|
||||||
</h2>
|
|
||||||
|
|
||||||
{isLoading && <LoadingSpinner />}
|
|
||||||
|
|
||||||
{isError && (
|
|
||||||
<ErrorState
|
|
||||||
message="Backend is unreachable."
|
|
||||||
onRetry={() => refetch()}
|
|
||||||
/>
|
|
||||||
)}
|
|
||||||
|
|
||||||
{data && (
|
|
||||||
<div className="flex items-center gap-2 text-emerald-400">
|
|
||||||
<span className="h-2 w-2 rounded-full bg-emerald-400" />
|
|
||||||
<span className="text-sm font-medium">{data.status || 'ok'}</span>
|
|
||||||
</div>
|
|
||||||
)}
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
)
|
|
||||||
}
|
|
||||||
339
frontend/src/pages/InventoryPage.tsx
Normal file
339
frontend/src/pages/InventoryPage.tsx
Normal file
@@ -0,0 +1,339 @@
|
|||||||
|
import { useState, useMemo } from 'react'
|
||||||
|
import { useQuery } from '@tanstack/react-query'
|
||||||
|
import { Search, Filter, ChevronLeft, ChevronRight, Trash2, Pencil, Plus, AlertTriangle } from 'lucide-react'
|
||||||
|
import ColorSwatch from '../components/ColorSwatch'
|
||||||
|
import { fetchFilaments, deleteFilament } from '../services/filamentService'
|
||||||
|
import type { FilamentSpool, FilamentFilter } from '../types/filament'
|
||||||
|
|
||||||
|
const PAGE_SIZE = 20
|
||||||
|
|
||||||
|
type SortField = 'name' | 'remaining_grams' | 'cost_usd'
|
||||||
|
type SortDir = 'asc' | 'desc'
|
||||||
|
|
||||||
|
export default function InventoryPage() {
|
||||||
|
const [search, setSearch] = useState('')
|
||||||
|
const [material, setMaterial] = useState('')
|
||||||
|
const [finish, setFinish] = useState('')
|
||||||
|
const [lowStockOnly, setLowStockOnly] = useState(false)
|
||||||
|
const [sortBy, setSortBy] = useState<SortField>('name')
|
||||||
|
const [sortDir, setSortDir] = useState<SortDir>('asc')
|
||||||
|
const [page, setPage] = useState(0)
|
||||||
|
const [deleteId, setDeleteId] = useState<number | null>(null)
|
||||||
|
|
||||||
|
const filter: FilamentFilter = useMemo(() => ({
|
||||||
|
material: material || undefined,
|
||||||
|
finish: finish || undefined,
|
||||||
|
low_stock: lowStockOnly,
|
||||||
|
sort_by: sortBy,
|
||||||
|
sort_dir: sortDir,
|
||||||
|
limit: PAGE_SIZE,
|
||||||
|
offset: page * PAGE_SIZE,
|
||||||
|
}), [material, finish, lowStockOnly, sortBy, sortDir, page])
|
||||||
|
|
||||||
|
const { data, isLoading, error, refetch } = useQuery({
|
||||||
|
queryKey: ['filaments', filter],
|
||||||
|
queryFn: () => fetchFilaments(filter),
|
||||||
|
})
|
||||||
|
|
||||||
|
const filaments = data?.data ?? []
|
||||||
|
const total = data?.total ?? 0
|
||||||
|
const totalPages = Math.max(1, Math.ceil(total / PAGE_SIZE))
|
||||||
|
|
||||||
|
// Client-side search filter (name/barcode) since backend may not support it yet.
|
||||||
|
const filtered = useMemo(() => {
|
||||||
|
if (!search.trim()) return filaments
|
||||||
|
const q = search.toLowerCase()
|
||||||
|
return filaments.filter(
|
||||||
|
(f: FilamentSpool) =>
|
||||||
|
f.name.toLowerCase().includes(q) ||
|
||||||
|
(f.barcode && f.barcode.toLowerCase().includes(q))
|
||||||
|
)
|
||||||
|
}, [filaments, search])
|
||||||
|
|
||||||
|
const handleSort = (field: SortField) => {
|
||||||
|
if (sortBy === field) {
|
||||||
|
setSortDir(prev => (prev === 'asc' ? 'desc' : 'asc'))
|
||||||
|
} else {
|
||||||
|
setSortBy(field)
|
||||||
|
setSortDir('asc')
|
||||||
|
}
|
||||||
|
setPage(0)
|
||||||
|
}
|
||||||
|
|
||||||
|
const handleDelete = async (id: number) => {
|
||||||
|
await deleteFilament(id)
|
||||||
|
setDeleteId(null)
|
||||||
|
refetch()
|
||||||
|
}
|
||||||
|
|
||||||
|
const SortIndicator = ({ field }: { field: SortField }) => {
|
||||||
|
if (sortBy !== field) return <span className="text-slate-600 ml-1">↕</span>
|
||||||
|
return <span className="text-emerald-400 ml-1">{sortDir === 'asc' ? '↑' : '↓'}</span>
|
||||||
|
}
|
||||||
|
|
||||||
|
return (
|
||||||
|
<div className="space-y-4">
|
||||||
|
{/* Header */}
|
||||||
|
<div className="flex flex-col sm:flex-row sm:items-center sm:justify-between gap-3">
|
||||||
|
<div>
|
||||||
|
<h2 className="text-xl font-bold text-slate-100">Filament Inventory</h2>
|
||||||
|
<p className="text-sm text-slate-400">{total} spool(s) total</p>
|
||||||
|
</div>
|
||||||
|
<button className="inline-flex items-center gap-2 rounded-lg bg-emerald-600 px-4 py-2 text-sm font-semibold text-white hover:bg-emerald-500 active:bg-emerald-700 transition-colors">
|
||||||
|
<Plus size={16} /> Add Spool
|
||||||
|
</button>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{/* Filters */}
|
||||||
|
<div className="flex flex-col lg:flex-row gap-3">
|
||||||
|
{/* Search */}
|
||||||
|
<div className="relative flex-1">
|
||||||
|
<Search size={16} className="absolute left-3 top-1/2 -translate-y-1/2 text-slate-400" />
|
||||||
|
<input
|
||||||
|
type="text"
|
||||||
|
placeholder="Search by name or barcode…"
|
||||||
|
value={search}
|
||||||
|
onChange={e => { setSearch(e.target.value); setPage(0) }}
|
||||||
|
className="w-full rounded-lg bg-slate-800 border border-slate-700 pl-9 pr-3 py-2 text-sm text-slate-100 placeholder-slate-500 focus:outline-none focus:ring-2 focus:ring-emerald-500 focus:border-emerald-500"
|
||||||
|
/>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{/* Material filter */}
|
||||||
|
<select
|
||||||
|
value={material}
|
||||||
|
onChange={e => { setMaterial(e.target.value); setPage(0) }}
|
||||||
|
className="rounded-lg bg-slate-800 border border-slate-700 px-3 py-2 text-sm text-slate-100 focus:outline-none focus:ring-2 focus:ring-emerald-500"
|
||||||
|
>
|
||||||
|
<option value="">All Materials</option>
|
||||||
|
<option value="PLA">PLA</option>
|
||||||
|
<option value="PETG">PETG</option>
|
||||||
|
<option value="ABS">ABS</option>
|
||||||
|
<option value="TPU">TPU</option>
|
||||||
|
<option value="ASA">ASA</option>
|
||||||
|
<option value="Nylon">Nylon</option>
|
||||||
|
<option value="PC">PC</option>
|
||||||
|
</select>
|
||||||
|
|
||||||
|
{/* Finish filter */}
|
||||||
|
<select
|
||||||
|
value={finish}
|
||||||
|
onChange={e => { setFinish(e.target.value); setPage(0) }}
|
||||||
|
className="rounded-lg bg-slate-800 border border-slate-700 px-3 py-2 text-sm text-slate-100 focus:outline-none focus:ring-2 focus:ring-emerald-500"
|
||||||
|
>
|
||||||
|
<option value="">All Finishes</option>
|
||||||
|
<option value="Basic">Basic</option>
|
||||||
|
<option value="Silk">Silk</option>
|
||||||
|
<option value="Matte">Matte</option>
|
||||||
|
<option value="Glossy">Glossy</option>
|
||||||
|
<option value="Wood">Wood</option>
|
||||||
|
<option value="Marble">Marble</option>
|
||||||
|
</select>
|
||||||
|
|
||||||
|
{/* Low stock toggle */}
|
||||||
|
<label className="inline-flex items-center gap-2 rounded-lg bg-slate-800 border border-slate-700 px-3 py-2 text-sm text-slate-100 cursor-pointer select-none hover:bg-slate-750">
|
||||||
|
<Filter size={14} className="text-amber-400" />
|
||||||
|
<input
|
||||||
|
type="checkbox"
|
||||||
|
checked={lowStockOnly}
|
||||||
|
onChange={e => { setLowStockOnly(e.target.checked); setPage(0) }}
|
||||||
|
className="accent-amber-500"
|
||||||
|
/>
|
||||||
|
Low Stock Only
|
||||||
|
</label>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{/* Loading / Error */}
|
||||||
|
{isLoading && (
|
||||||
|
<div className="text-center py-12 text-slate-400">Loading spools…</div>
|
||||||
|
)}
|
||||||
|
{error && (
|
||||||
|
<div className="text-center py-12 text-red-400">
|
||||||
|
Failed to load inventory.
|
||||||
|
<button onClick={() => refetch()} className="ml-2 underline hover:text-red-300">Retry</button>
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
|
||||||
|
{/* Desktop Table */}
|
||||||
|
{!isLoading && !error && (
|
||||||
|
<>
|
||||||
|
<div className="hidden md:block overflow-x-auto rounded-lg border border-slate-700">
|
||||||
|
<table className="w-full text-sm">
|
||||||
|
<thead className="bg-slate-800 text-slate-300">
|
||||||
|
<tr>
|
||||||
|
<th className="px-4 py-3 text-left font-semibold cursor-pointer select-none hover:text-slate-100" onClick={() => handleSort('name')}>
|
||||||
|
Name <SortIndicator field="name" />
|
||||||
|
</th>
|
||||||
|
<th className="px-4 py-3 text-left font-semibold">Material</th>
|
||||||
|
<th className="px-4 py-3 text-left font-semibold">Finish</th>
|
||||||
|
<th className="px-4 py-3 text-left font-semibold">Color</th>
|
||||||
|
<th className="px-4 py-3 text-right font-semibold cursor-pointer select-none hover:text-slate-100" onClick={() => handleSort('remaining_grams')}>
|
||||||
|
Remaining <SortIndicator field="remaining_grams" />
|
||||||
|
</th>
|
||||||
|
<th className="px-4 py-3 text-right font-semibold cursor-pointer select-none hover:text-slate-100" onClick={() => handleSort('cost_usd')}>
|
||||||
|
Cost <SortIndicator field="cost_usd" />
|
||||||
|
</th>
|
||||||
|
<th className="px-4 py-3 text-center font-semibold">Status</th>
|
||||||
|
<th className="px-4 py-3 text-right font-semibold">Actions</th>
|
||||||
|
</tr>
|
||||||
|
</thead>
|
||||||
|
<tbody className="divide-y divide-slate-700">
|
||||||
|
{filtered.length === 0 && (
|
||||||
|
<tr>
|
||||||
|
<td colSpan={8} className="px-4 py-8 text-center text-slate-500">No spools found.</td>
|
||||||
|
</tr>
|
||||||
|
)}
|
||||||
|
{filtered.map((spool: FilamentSpool) => {
|
||||||
|
const isLow = spool.remaining_grams <= spool.low_stock_threshold_grams
|
||||||
|
return (
|
||||||
|
<tr key={spool.id} className={`${isLow ? 'bg-red-900/20' : 'bg-slate-800/50'} hover:bg-slate-700/50 transition-colors`}>
|
||||||
|
<td className="px-4 py-3 font-medium text-slate-100">{spool.name}</td>
|
||||||
|
<td className="px-4 py-3 text-slate-300">{spool.material_base?.name ?? '—'}</td>
|
||||||
|
<td className="px-4 py-3 text-slate-300">{spool.material_finish?.name ?? '—'}</td>
|
||||||
|
<td className="px-4 py-3">
|
||||||
|
<div className="flex items-center gap-2">
|
||||||
|
<ColorSwatch colorHex={spool.color_hex} size={20} />
|
||||||
|
<span className="text-xs text-slate-400 uppercase">{spool.color_hex}</span>
|
||||||
|
</div>
|
||||||
|
</td>
|
||||||
|
<td className="px-4 py-3 text-right tabular-nums text-slate-200">{spool.remaining_grams.toLocaleString()} g</td>
|
||||||
|
<td className="px-4 py-3 text-right tabular-nums text-slate-300">{spool.cost_usd != null ? `$${spool.cost_usd.toFixed(2)}` : '—'}</td>
|
||||||
|
<td className="px-4 py-3 text-center">
|
||||||
|
{isLow ? (
|
||||||
|
<span className="inline-flex items-center gap-1 rounded-full bg-red-900/50 border border-red-700 px-2 py-0.5 text-xs font-medium text-red-300">
|
||||||
|
<AlertTriangle size={12} /> Low
|
||||||
|
</span>
|
||||||
|
) : (
|
||||||
|
<span className="inline-flex items-center rounded-full bg-emerald-900/30 border border-emerald-700 px-2 py-0.5 text-xs font-medium text-emerald-300">OK</span>
|
||||||
|
)}
|
||||||
|
</td>
|
||||||
|
<td className="px-4 py-3 text-right">
|
||||||
|
<div className="flex items-center justify-end gap-2">
|
||||||
|
<button className="p-1.5 rounded hover:bg-slate-600 text-slate-400 hover:text-blue-400 transition-colors" title="Edit">
|
||||||
|
<Pencil size={14} />
|
||||||
|
</button>
|
||||||
|
<button
|
||||||
|
onClick={() => setDeleteId(spool.id)}
|
||||||
|
className="p-1.5 rounded hover:bg-slate-600 text-slate-400 hover:text-red-400 transition-colors"
|
||||||
|
title="Delete"
|
||||||
|
>
|
||||||
|
<Trash2 size={14} />
|
||||||
|
</button>
|
||||||
|
</div>
|
||||||
|
</td>
|
||||||
|
</tr>
|
||||||
|
)
|
||||||
|
})}
|
||||||
|
</tbody>
|
||||||
|
</table>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{/* Mobile Cards */}
|
||||||
|
<div className="md:hidden space-y-3">
|
||||||
|
{filtered.length === 0 && (
|
||||||
|
<div className="text-center py-12 text-slate-500">No spools found.</div>
|
||||||
|
)}
|
||||||
|
{filtered.map((spool: FilamentSpool) => {
|
||||||
|
const isLow = spool.remaining_grams <= spool.low_stock_threshold_grams
|
||||||
|
return (
|
||||||
|
<div key={spool.id} className={`rounded-lg border ${isLow ? 'border-red-700 bg-red-900/10' : 'border-slate-700 bg-slate-800'} p-4 space-y-2`}>
|
||||||
|
<div className="flex items-start justify-between">
|
||||||
|
<div>
|
||||||
|
<div className="font-semibold text-slate-100">{spool.name}</div>
|
||||||
|
<div className="text-xs text-slate-400 mt-0.5">{spool.material_base?.name ?? '—'} · {spool.material_finish?.name ?? '—'}</div>
|
||||||
|
</div>
|
||||||
|
{isLow ? (
|
||||||
|
<span className="inline-flex items-center gap-1 rounded-full bg-red-900/50 border border-red-700 px-2 py-0.5 text-xs font-medium text-red-300">
|
||||||
|
<AlertTriangle size={12} /> Low
|
||||||
|
</span>
|
||||||
|
) : (
|
||||||
|
<span className="inline-flex items-center rounded-full bg-emerald-900/30 border border-emerald-700 px-2 py-0.5 text-xs font-medium text-emerald-300">OK</span>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div className="flex items-center gap-3 text-sm">
|
||||||
|
<div className="flex items-center gap-2">
|
||||||
|
<ColorSwatch colorHex={spool.color_hex} size={20} />
|
||||||
|
<span className="text-slate-400 uppercase text-xs">{spool.color_hex}</span>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div className="flex items-center justify-between text-sm">
|
||||||
|
<span className="text-slate-400">Remaining: <span className="text-slate-200 font-medium tabular-nums">{spool.remaining_grams.toLocaleString()} g</span></span>
|
||||||
|
<span className="text-slate-400">{spool.cost_usd != null ? `$${spool.cost_usd.toFixed(2)}` : '—'}</span>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div className="flex items-center justify-end gap-2 pt-1">
|
||||||
|
<button className="flex items-center gap-1 rounded-md bg-slate-700 px-3 py-1.5 text-xs font-medium text-slate-200 hover:bg-slate-600">
|
||||||
|
<Pencil size={12} /> Edit
|
||||||
|
</button>
|
||||||
|
<button
|
||||||
|
onClick={() => setDeleteId(spool.id)}
|
||||||
|
className="flex items-center gap-1 rounded-md bg-red-900/30 border border-red-700 px-3 py-1.5 text-xs font-medium text-red-300 hover:bg-red-900/50"
|
||||||
|
>
|
||||||
|
<Trash2 size={12} /> Delete
|
||||||
|
</button>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
)
|
||||||
|
})}
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{/* Pagination */}
|
||||||
|
<div className="flex items-center justify-between pt-2">
|
||||||
|
<span className="text-sm text-slate-400">
|
||||||
|
Showing {page * PAGE_SIZE + 1}–{Math.min((page + 1) * PAGE_SIZE, total)} of {total}
|
||||||
|
</span>
|
||||||
|
<div className="flex items-center gap-2">
|
||||||
|
<button
|
||||||
|
onClick={() => setPage(p => Math.max(0, p - 1))}
|
||||||
|
disabled={page === 0}
|
||||||
|
className="p-2 rounded-lg bg-slate-800 border border-slate-700 text-slate-300 hover:bg-slate-700 disabled:opacity-40 disabled:cursor-not-allowed transition-colors"
|
||||||
|
>
|
||||||
|
<ChevronLeft size={16} />
|
||||||
|
</button>
|
||||||
|
<span className="text-sm text-slate-300 tabular-nums">{page + 1} / {totalPages}</span>
|
||||||
|
<button
|
||||||
|
onClick={() => setPage(p => Math.min(totalPages - 1, p + 1))}
|
||||||
|
disabled={page >= totalPages - 1}
|
||||||
|
className="p-2 rounded-lg bg-slate-800 border border-slate-700 text-slate-300 hover:bg-slate-700 disabled:opacity-40 disabled:cursor-not-allowed transition-colors"
|
||||||
|
>
|
||||||
|
<ChevronRight size={16} />
|
||||||
|
</button>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</>
|
||||||
|
)}
|
||||||
|
|
||||||
|
{/* Delete confirmation modal */}
|
||||||
|
{deleteId !== null && (
|
||||||
|
<div className="fixed inset-0 z-50 flex items-center justify-center bg-black/60 backdrop-blur-sm p-4">
|
||||||
|
<div className="w-full max-w-sm rounded-xl bg-slate-800 border border-slate-700 p-6 shadow-2xl space-y-4">
|
||||||
|
<div className="flex items-center gap-3">
|
||||||
|
<div className="flex h-10 w-10 items-center justify-center rounded-full bg-red-900/30">
|
||||||
|
<AlertTriangle size={20} className="text-red-400" />
|
||||||
|
</div>
|
||||||
|
<div>
|
||||||
|
<h3 className="text-lg font-semibold text-slate-100">Delete Spool?</h3>
|
||||||
|
<p className="text-sm text-slate-400">This action cannot be undone.</p>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
<div className="flex justify-end gap-3">
|
||||||
|
<button
|
||||||
|
onClick={() => setDeleteId(null)}
|
||||||
|
className="rounded-lg bg-slate-700 px-4 py-2 text-sm font-medium text-slate-200 hover:bg-slate-600 transition-colors"
|
||||||
|
>
|
||||||
|
Cancel
|
||||||
|
</button>
|
||||||
|
<button
|
||||||
|
onClick={() => handleDelete(deleteId)}
|
||||||
|
className="rounded-lg bg-red-600 px-4 py-2 text-sm font-medium text-white hover:bg-red-500 transition-colors"
|
||||||
|
>
|
||||||
|
Delete
|
||||||
|
</button>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
)
|
||||||
|
}
|
||||||
@@ -1,25 +0,0 @@
|
|||||||
import axios from 'axios'
|
|
||||||
|
|
||||||
const API_BASE_URL = import.meta.env.VITE_API_BASE_URL || 'http://localhost:8080'
|
|
||||||
|
|
||||||
export const api = axios.create({
|
|
||||||
baseURL: API_BASE_URL,
|
|
||||||
headers: {
|
|
||||||
'Content-Type': 'application/json',
|
|
||||||
},
|
|
||||||
timeout: 10000,
|
|
||||||
})
|
|
||||||
|
|
||||||
api.interceptors.response.use(
|
|
||||||
(response) => response,
|
|
||||||
(error) => {
|
|
||||||
// eslint-disable-next-line no-console
|
|
||||||
console.error('API error:', error)
|
|
||||||
return Promise.reject(error)
|
|
||||||
}
|
|
||||||
)
|
|
||||||
|
|
||||||
export async function healthCheck(): Promise<{ status: string }> {
|
|
||||||
const { data } = await api.get('/health')
|
|
||||||
return data
|
|
||||||
}
|
|
||||||
24
frontend/src/services/filamentService.ts
Normal file
24
frontend/src/services/filamentService.ts
Normal file
@@ -0,0 +1,24 @@
|
|||||||
|
import axios from 'axios'
|
||||||
|
import type { FilamentSpool, ListResponse, FilamentFilter } from '../types/filament'
|
||||||
|
|
||||||
|
const API_BASE = '/api'
|
||||||
|
|
||||||
|
export async function fetchFilaments(filter: FilamentFilter): Promise<ListResponse<FilamentSpool>> {
|
||||||
|
const params = new URLSearchParams()
|
||||||
|
if (filter.material) params.set('material', filter.material)
|
||||||
|
if (filter.finish) params.set('finish', filter.finish)
|
||||||
|
if (filter.color) params.set('color', filter.color)
|
||||||
|
if (filter.low_stock) params.set('low_stock', 'true')
|
||||||
|
if (filter.search) params.set('search', filter.search)
|
||||||
|
if (filter.sort_by) params.set('sort_by', filter.sort_by)
|
||||||
|
if (filter.sort_dir) params.set('sort_dir', filter.sort_dir)
|
||||||
|
if (filter.limit !== undefined) params.set('limit', String(filter.limit))
|
||||||
|
if (filter.offset !== undefined) params.set('offset', String(filter.offset))
|
||||||
|
|
||||||
|
const res = await axios.get<ListResponse<FilamentSpool>>(`${API_BASE}/filaments?${params.toString()}`)
|
||||||
|
return res.data
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function deleteFilament(id: number): Promise<void> {
|
||||||
|
await axios.delete(`${API_BASE}/filaments/${id}`)
|
||||||
|
}
|
||||||
72
frontend/src/types/filament.ts
Normal file
72
frontend/src/types/filament.ts
Normal file
@@ -0,0 +1,72 @@
|
|||||||
|
// Extrudex domain types
|
||||||
|
|
||||||
|
export interface MaterialBase {
|
||||||
|
id: number
|
||||||
|
name: string
|
||||||
|
density_g_cm3: number
|
||||||
|
extrusion_temp_min?: number
|
||||||
|
extrusion_temp_max?: number
|
||||||
|
bed_temp_min?: number
|
||||||
|
bed_temp_max?: number
|
||||||
|
created_at: string
|
||||||
|
updated_at: string
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface MaterialFinish {
|
||||||
|
id: number
|
||||||
|
name: string
|
||||||
|
description?: string
|
||||||
|
created_at: string
|
||||||
|
updated_at: string
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface MaterialModifier {
|
||||||
|
id: number
|
||||||
|
name: string
|
||||||
|
description?: string
|
||||||
|
created_at: string
|
||||||
|
updated_at: string
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface FilamentSpool {
|
||||||
|
id: number
|
||||||
|
name: string
|
||||||
|
material_base_id: number
|
||||||
|
material_base?: MaterialBase
|
||||||
|
material_finish_id: number
|
||||||
|
material_finish?: MaterialFinish
|
||||||
|
material_modifier_id?: number
|
||||||
|
material_modifier?: MaterialModifier
|
||||||
|
color_hex: string
|
||||||
|
brand?: string
|
||||||
|
diameter_mm: number
|
||||||
|
initial_grams: number
|
||||||
|
remaining_grams: number
|
||||||
|
spool_weight_grams?: number
|
||||||
|
cost_usd?: number
|
||||||
|
low_stock_threshold_grams: number
|
||||||
|
notes?: string
|
||||||
|
barcode?: string
|
||||||
|
deleted_at?: string
|
||||||
|
created_at: string
|
||||||
|
updated_at: string
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface ListResponse<T> {
|
||||||
|
data: T[]
|
||||||
|
total: number
|
||||||
|
limit: number
|
||||||
|
offset: number
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface FilamentFilter {
|
||||||
|
material?: string
|
||||||
|
finish?: string
|
||||||
|
color?: string
|
||||||
|
low_stock?: boolean
|
||||||
|
search?: string
|
||||||
|
sort_by?: string
|
||||||
|
sort_dir?: 'asc' | 'desc'
|
||||||
|
limit?: number
|
||||||
|
offset?: number
|
||||||
|
}
|
||||||
@@ -1,6 +0,0 @@
|
|||||||
// Shared TypeScript types for Extrudex frontend
|
|
||||||
// Placeholder — expand as API contracts stabilize
|
|
||||||
|
|
||||||
export interface HealthResponse {
|
|
||||||
status: string
|
|
||||||
}
|
|
||||||
8
frontend/src/vite-env.d.ts
vendored
8
frontend/src/vite-env.d.ts
vendored
@@ -1,9 +1 @@
|
|||||||
/// <reference types="vite/client" />
|
/// <reference types="vite/client" />
|
||||||
|
|
||||||
interface ImportMetaEnv {
|
|
||||||
readonly VITE_API_BASE_URL: string
|
|
||||||
}
|
|
||||||
|
|
||||||
interface ImportMeta {
|
|
||||||
readonly env: ImportMetaEnv
|
|
||||||
}
|
|
||||||
|
|||||||
@@ -5,13 +5,7 @@ export default {
|
|||||||
"./src/**/*.{js,ts,jsx,tsx}",
|
"./src/**/*.{js,ts,jsx,tsx}",
|
||||||
],
|
],
|
||||||
theme: {
|
theme: {
|
||||||
extend: {
|
extend: {},
|
||||||
colors: {
|
|
||||||
slate: {
|
|
||||||
850: '#1e293b',
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
},
|
||||||
plugins: [],
|
plugins: [],
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,38 +1,24 @@
|
|||||||
{
|
{
|
||||||
"compilerOptions": {
|
"compilerOptions": {
|
||||||
"tsBuildInfoFile": "./node_modules/.tmp/tsconfig.app.tsbuildinfo",
|
"target": "ES2020",
|
||||||
"target": "ES2023",
|
"useDefineForClassFields": true,
|
||||||
"lib": ["ES2023", "DOM", "DOM.Iterable"],
|
"lib": ["ES2020", "DOM", "DOM.Iterable"],
|
||||||
"module": "ESNext",
|
"module": "ESNext",
|
||||||
"types": ["vite/client"],
|
|
||||||
"skipLibCheck": true,
|
"skipLibCheck": true,
|
||||||
|
|
||||||
/* Bundler mode */
|
|
||||||
"moduleResolution": "bundler",
|
"moduleResolution": "bundler",
|
||||||
"allowImportingTsExtensions": true,
|
"allowImportingTsExtensions": true,
|
||||||
"verbatimModuleSyntax": true,
|
"isolatedModules": true,
|
||||||
"moduleDetection": "force",
|
"moduleDetection": "force",
|
||||||
"noEmit": true,
|
"noEmit": true,
|
||||||
"jsx": "react-jsx",
|
"jsx": "react-jsx",
|
||||||
|
"strict": true,
|
||||||
/* Linting */
|
|
||||||
"noUnusedLocals": true,
|
"noUnusedLocals": true,
|
||||||
"noUnusedParameters": true,
|
"noUnusedParameters": true,
|
||||||
"erasableSyntaxOnly": true,
|
|
||||||
"noFallthroughCasesInSwitch": true,
|
"noFallthroughCasesInSwitch": true,
|
||||||
|
"baseUrl": ".",
|
||||||
/* Strict mode */
|
"paths": {
|
||||||
"strict": true,
|
"@/*": ["src/*"]
|
||||||
"noImplicitAny": true,
|
}
|
||||||
"strictNullChecks": true,
|
|
||||||
"strictFunctionTypes": true,
|
|
||||||
"strictBindCallApply": true,
|
|
||||||
"strictPropertyInitialization": true,
|
|
||||||
"noImplicitThis": true,
|
|
||||||
"alwaysStrict": true,
|
|
||||||
"noUncheckedIndexedAccess": true,
|
|
||||||
"exactOptionalPropertyTypes": true,
|
|
||||||
"noImplicitReturns": true
|
|
||||||
},
|
},
|
||||||
"include": ["src"]
|
"include": ["src"]
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,7 +1,24 @@
|
|||||||
{
|
{
|
||||||
"files": [],
|
"compilerOptions": {
|
||||||
"references": [
|
"target": "ES2020",
|
||||||
{ "path": "./tsconfig.app.json" },
|
"useDefineForClassFields": true,
|
||||||
{ "path": "./tsconfig.node.json" }
|
"lib": ["ES2020", "DOM", "DOM.Iterable"],
|
||||||
]
|
"module": "ESNext",
|
||||||
|
"skipLibCheck": true,
|
||||||
|
"moduleResolution": "bundler",
|
||||||
|
"allowImportingTsExtensions": true,
|
||||||
|
"isolatedModules": true,
|
||||||
|
"moduleDetection": "force",
|
||||||
|
"noEmit": true,
|
||||||
|
"jsx": "react-jsx",
|
||||||
|
"strict": true,
|
||||||
|
"noUnusedLocals": true,
|
||||||
|
"noUnusedParameters": true,
|
||||||
|
"noFallthroughCasesInSwitch": true,
|
||||||
|
"baseUrl": ".",
|
||||||
|
"paths": {
|
||||||
|
"@/*": ["src/*"]
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"include": ["src"]
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,24 +1,11 @@
|
|||||||
{
|
{
|
||||||
"compilerOptions": {
|
"compilerOptions": {
|
||||||
"tsBuildInfoFile": "./node_modules/.tmp/tsconfig.node.tsbuildinfo",
|
"composite": true,
|
||||||
"target": "es2023",
|
|
||||||
"lib": ["ES2023"],
|
|
||||||
"module": "esnext",
|
|
||||||
"types": ["node"],
|
|
||||||
"skipLibCheck": true,
|
"skipLibCheck": true,
|
||||||
|
"module": "ESNext",
|
||||||
/* Bundler mode */
|
|
||||||
"moduleResolution": "bundler",
|
"moduleResolution": "bundler",
|
||||||
"allowImportingTsExtensions": true,
|
"allowSyntheticDefaultImports": true,
|
||||||
"verbatimModuleSyntax": true,
|
"strict": true
|
||||||
"moduleDetection": "force",
|
|
||||||
"noEmit": true,
|
|
||||||
|
|
||||||
/* Linting */
|
|
||||||
"noUnusedLocals": true,
|
|
||||||
"noUnusedParameters": true,
|
|
||||||
"erasableSyntaxOnly": true,
|
|
||||||
"noFallthroughCasesInSwitch": true
|
|
||||||
},
|
},
|
||||||
"include": ["vite.config.ts"]
|
"include": ["vite.config.ts"]
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -2,15 +2,18 @@ import { defineConfig } from 'vite'
|
|||||||
import react from '@vitejs/plugin-react'
|
import react from '@vitejs/plugin-react'
|
||||||
import tailwindcss from '@tailwindcss/vite'
|
import tailwindcss from '@tailwindcss/vite'
|
||||||
|
|
||||||
// https://vite.dev/config/
|
|
||||||
export default defineConfig({
|
export default defineConfig({
|
||||||
plugins: [react(), tailwindcss()],
|
plugins: [react(), tailwindcss()],
|
||||||
server: {
|
server: {
|
||||||
port: 5173,
|
port: 5173,
|
||||||
host: true,
|
proxy: {
|
||||||
|
'/api': {
|
||||||
|
target: 'http://localhost:8080',
|
||||||
|
changeOrigin: true,
|
||||||
|
}
|
||||||
|
}
|
||||||
},
|
},
|
||||||
build: {
|
build: {
|
||||||
outDir: 'dist',
|
outDir: 'dist',
|
||||||
sourcemap: true,
|
}
|
||||||
},
|
|
||||||
})
|
})
|
||||||
|
|||||||
Reference in New Issue
Block a user