Compare commits
84 Commits
agent/dex/
...
agent/Dex/
| Author | SHA1 | Date | |
|---|---|---|---|
| 90fd028bfc | |||
| f1614029b5 | |||
| 1109d1dd2f | |||
| fd26b205bf | |||
| 41f66005a6 | |||
| 62d74beba4 | |||
| fca2ef5b84 | |||
| 3ac8432360 | |||
| f15597966f | |||
| a54fcdd371 | |||
| 1b86d617cd | |||
|
|
fd39fff433 | ||
| 2243859286 | |||
|
|
3fe0850711 | ||
| 42285c5dac | |||
| 9cd619b5ee | |||
| ddae95767f | |||
| 15187cab65 | |||
| 9112f78641 | |||
| 57157ad947 | |||
| a2707e02ee | |||
| 9192ece040 | |||
| fa4a4c21b3 | |||
| f2d9b7f455 | |||
| 808d5f909d | |||
| b7e61fab8a | |||
| 5ede6a8eb6 | |||
| e56aa3ba39 | |||
| f70495a85c | |||
| bb35ed1eab | |||
| 1f03606468 | |||
| 1b4fc22f59 | |||
| b86dda97a3 | |||
| 8b2a29881d | |||
| 90a89eecf3 | |||
| 215033f3e5 | |||
| a28d032b16 | |||
| a90627de28 | |||
| e9e856a012 | |||
| 46d28676f0 | |||
| ed0efd598b | |||
| 19415003a2 | |||
| 7904fcda02 | |||
| 3d3b7059cf | |||
| fc6134b162 | |||
| 51bfb6d115 | |||
| aa182af979 | |||
| ac033859a8 | |||
| c3a0f210a1 | |||
| 2017843dc1 | |||
| c150f54c64 | |||
| 73363206ec | |||
| 174dd294e9 | |||
| 0378aee43e | |||
| 72a39ec766 | |||
| c05b9dd87d | |||
| 5a577e1871 | |||
| 2e8227c3f9 | |||
| d207c49ffd | |||
| 5b9dde13fe | |||
| fd9fcd47ab | |||
| cfd4a81b5f | |||
| 8a2f97d2cd | |||
| d43985cad9 | |||
| b43edad5f0 | |||
| f5ca20307e | |||
| 12888c4f3f | |||
| 1411b68a95 | |||
| 7daa7d637c | |||
| c88ad43530 | |||
| 6aa31f4be3 | |||
| 4ba98966eb | |||
| c1a115c938 | |||
| 61178ebb7b | |||
| 920042acac | |||
| 8168d25bdf | |||
| fc4c9cf397 | |||
| d5b5b44dc2 | |||
| 0cd8bb1939 | |||
|
|
1ee7562e81 | ||
| 311dd2ee7f | |||
| 7d0369b8e9 | |||
|
|
3d67610575 | ||
|
|
9cd27e213b |
@@ -20,12 +20,15 @@ jobs:
|
||||
|
||||
- name: Restore backend
|
||||
run: dotnet restore
|
||||
working-directory: ./backend
|
||||
|
||||
- name: Build backend
|
||||
run: dotnet build --no-restore --configuration Release
|
||||
working-directory: ./backend
|
||||
|
||||
- name: Test backend
|
||||
run: dotnet test --no-build --configuration Release
|
||||
working-directory: ./backend
|
||||
|
||||
- name: Setup Node
|
||||
uses: actions/setup-node@v4
|
||||
@@ -39,39 +42,3 @@ jobs:
|
||||
- name: Build frontend
|
||||
run: npm run build
|
||||
working-directory: ./frontend
|
||||
|
||||
deploy-dev:
|
||||
needs: build-test
|
||||
if: gitea.event_name == 'push'
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- name: Deploy dev
|
||||
run: |
|
||||
echo "${{ secrets.DEV_DEPLOY_SSH_KEY }}" > /tmp/dev_key
|
||||
chmod 600 /tmp/dev_key
|
||||
ssh -i /tmp/dev_key -o StrictHostKeyChecking=no \
|
||||
${{ secrets.DEV_DEPLOY_USER }}@${{ secrets.DEV_DEPLOY_HOST }} \
|
||||
"${{ secrets.DEV_DEPLOY_PATH }}/deploy.sh"
|
||||
|
||||
notify-success:
|
||||
needs: [build-test, deploy-dev]
|
||||
if: success() && gitea.event_name == 'push'
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Notify Slack success
|
||||
run: |
|
||||
curl -X POST -H 'Content-type: application/json' \
|
||||
--data "{\"text\":\"✅ Extrudex dev deployed successfully from dev branch.\"}" \
|
||||
"${{ secrets.SLACK_WEBHOOK_URL }}"
|
||||
|
||||
notify-failure:
|
||||
needs: [build-test, deploy-dev]
|
||||
if: failure()
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Notify Slack failure
|
||||
run: |
|
||||
curl -X POST -H 'Content-type: application/json' \
|
||||
--data "{\"text\":\"🚨 Extrudex dev pipeline failed. Check Gitea Actions for details.\"}" \
|
||||
"${{ secrets.SLACK_WEBHOOK_URL }}"
|
||||
7
.gitignore
vendored
7
.gitignore
vendored
@@ -2,4 +2,9 @@ bin/
|
||||
obj/
|
||||
*.user
|
||||
*.suo
|
||||
.vs/
|
||||
.vs/
|
||||
|
||||
# Frontend build artifacts
|
||||
frontend/dist/
|
||||
frontend/node_modules/
|
||||
frontend/.angular/
|
||||
220
README.md
220
README.md
@@ -0,0 +1,220 @@
|
||||
# Extrudex
|
||||
|
||||
> Filament inventory and print tracking system for CubeCraft Creations.
|
||||
|
||||
Extrudex replaces Spoolman with a fully custom solution built for Joshua's 7-printer fleet. It tracks spool stock, per-print material consumption, and cost-of-goods — with a touch-optimized kiosk interface on a Raspberry Pi 5.
|
||||
|
||||
---
|
||||
|
||||
## Tech Stack
|
||||
|
||||
| Layer | Technology |
|
||||
|---|---|
|
||||
| Backend | ASP.NET Core Web API (.NET 8) |
|
||||
| Database | PostgreSQL (snake_case via EF Core) |
|
||||
| ORM | Entity Framework Core |
|
||||
| Real-time | SignalR (`PrinterHub`) |
|
||||
| Printer integration | Moonraker REST/WebSocket (Elegoo) · MQTTnet + TLS (Bambu Lab) |
|
||||
| Frontend | Angular 17+, Angular Material |
|
||||
| Deployment | Docker · Docker Compose |
|
||||
|
||||
---
|
||||
|
||||
## Project Structure
|
||||
|
||||
```
|
||||
Extrudex/
|
||||
├── backend/
|
||||
│ ├── Domain/
|
||||
│ │ ├── Base/ # BaseEntity, AuditableEntity
|
||||
│ │ ├── Entities/ # Spool, Printer, PrintJob, FilamentUsage,
|
||||
│ │ │ # AmsUnit, AmsSlot, MaterialBase,
|
||||
│ │ │ # MaterialFinish, MaterialModifier
|
||||
│ │ ├── Enums/ # ConnectionType, DataSource, JobStatus,
|
||||
│ │ │ # PrinterStatus, PrinterType, QrResourceType
|
||||
│ │ └── Interfaces/ # ICostPerPrintService, IFilamentUsageSyncService,
|
||||
│ │ # IMoonrakerClient, IQrCodeService
|
||||
│ ├── Infrastructure/
|
||||
│ │ ├── Configuration/ # FilamentUsageSyncOptions
|
||||
│ │ ├── Data/
|
||||
│ │ │ ├── Configurations/ # EF Core fluent configs (snake_case)
|
||||
│ │ │ ├── Migrations/ # EF migrations
|
||||
│ │ │ ├── Seed/ # SeedData.cs
|
||||
│ │ │ └── ExtrudexDbContext.cs
|
||||
│ │ └── Services/ # CostPerPrintService, FilamentUsageSyncService,
|
||||
│ │ # MoonrakerClient, QrCodeService
|
||||
│ └── API/
|
||||
│ ├── Controllers/ # Filaments, Spools, Printers, PrintJobs,
|
||||
│ │ # MaterialBases, MaterialFinishes,
|
||||
│ │ # MaterialModifiers, MaterialLookups,
|
||||
│ │ # CostAnalysis, QR
|
||||
│ ├── DTOs/ # Request/response shapes per domain
|
||||
│ ├── Filters/ # FluentValidationFilter
|
||||
│ ├── Hubs/ # PrinterHub, IPrinterClient
|
||||
│ ├── Jobs/ # FilamentUsageSyncJob (background)
|
||||
│ ├── Validators/ # FluentValidation validators
|
||||
│ ├── Program.cs
|
||||
│ └── appsettings.json
|
||||
├── frontend/
|
||||
│ └── src/app/
|
||||
│ ├── components/ # DashboardSummary, FilamentFilter, FilamentTable
|
||||
│ ├── models/ # Filament, Agent model types
|
||||
│ └── app.routes.ts
|
||||
├── design/ # UX specs and mockups (kiosk + mobile)
|
||||
├── docker-compose.dev.yml
|
||||
├── deploy.sh
|
||||
└── README.md
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Domain Model
|
||||
|
||||
### Materials (normalized taxonomy)
|
||||
|
||||
| Entity | Description |
|
||||
|---|---|
|
||||
| `MaterialBase` | The base material type — PLA, PETG, ABS, ASA, TPU, etc. |
|
||||
| `MaterialFinish` | Required. Surface finish — Basic (default), Matte, Silk, Sparkle, etc. |
|
||||
| `MaterialModifier` | Optional. Composite fill — Carbon Fiber, Glass Fiber, Wood, etc. |
|
||||
|
||||
**Rules:**
|
||||
- `MaterialFinish` is required — every spool must have one. Default is `"Basic"`.
|
||||
- `MaterialModifier` is optional — plain PLA has no modifier.
|
||||
|
||||
### Consumption calculation
|
||||
|
||||
```
|
||||
grams_used = mm_extruded × filament_cross_section_area × material_density
|
||||
```
|
||||
|
||||
Grams are always derived, never assumed from printer telemetry directly.
|
||||
|
||||
### Printers
|
||||
|
||||
| Type | Integration |
|
||||
|---|---|
|
||||
| Bambu Lab (×5) | MQTTnet with TLS |
|
||||
| Elegoo Centauri Carbon | Moonraker REST + WebSocket |
|
||||
| Elegoo Saturn (resin ×2) | Manual / future |
|
||||
|
||||
AMS units and slots are modelled as `AmsUnit` → `AmsSlot[]` → `Spool`.
|
||||
|
||||
---
|
||||
|
||||
## Key Design Decisions
|
||||
|
||||
1. **Spoolman rejected** — Full custom system for data model control and workflow flexibility.
|
||||
2. **`"Basic"` not `"Standard"`** — Default `MaterialFinish` value is `Basic`.
|
||||
3. **`MaterialFinish` is required** — No null/optional finish state allowed.
|
||||
4. **`MaterialModifier` is optional** — Not every spool has a modifier.
|
||||
5. **Derived consumption** — Grams calculated from mm × density, never assumed.
|
||||
6. **Push over poll** — SignalR and MQTT preferred over periodic polling.
|
||||
7. **Snake_case PostgreSQL** — All database identifiers follow this convention via EF Core.
|
||||
|
||||
---
|
||||
|
||||
## Getting Started
|
||||
|
||||
### Prerequisites
|
||||
|
||||
- .NET 8 SDK
|
||||
- Node.js 20+
|
||||
- Docker + Docker Compose
|
||||
- PostgreSQL (or use the dev compose stack)
|
||||
|
||||
### Backend
|
||||
|
||||
```bash
|
||||
cd backend
|
||||
|
||||
# Restore and build
|
||||
dotnet restore
|
||||
dotnet build
|
||||
|
||||
# Apply migrations
|
||||
dotnet ef database update
|
||||
|
||||
# Run API (dev)
|
||||
dotnet run --project API
|
||||
```
|
||||
|
||||
API runs at `http://localhost:5000` · Swagger at `http://localhost:5000/swagger`
|
||||
|
||||
### Frontend
|
||||
|
||||
```bash
|
||||
cd frontend
|
||||
npm install
|
||||
ng serve
|
||||
```
|
||||
|
||||
Frontend runs at `http://localhost:4200`
|
||||
|
||||
### Docker (dev stack)
|
||||
|
||||
```bash
|
||||
docker-compose -f docker-compose.dev.yml up
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Configuration
|
||||
|
||||
`backend/appsettings.json` — override in `appsettings.Development.json` or environment variables:
|
||||
|
||||
| Key | Default | Description |
|
||||
|---|---|---|
|
||||
| `ConnectionStrings:ExtrudexDb` | `Host=localhost;...` | PostgreSQL connection string |
|
||||
| `FilamentUsageSync:PollingInterval` | `00:05:00` | Sync job interval |
|
||||
| `FilamentUsageSync:RequestTimeout` | `00:00:30` | Moonraker request timeout |
|
||||
| `FilamentUsageSync:Enabled` | `true` | Enable/disable background sync |
|
||||
|
||||
---
|
||||
|
||||
## Real-Time Events
|
||||
|
||||
SignalR hub endpoint: `/hubs/printer`
|
||||
|
||||
Clients receive `PrinterHub` events for live printer status, job progress, and spool consumption updates.
|
||||
|
||||
---
|
||||
|
||||
## API Overview
|
||||
|
||||
| Route prefix | Resource |
|
||||
|---|---|
|
||||
| `/api/filaments` | Filament catalog |
|
||||
| `/api/spools` | Spool inventory |
|
||||
| `/api/printers` | Printer registry |
|
||||
| `/api/print-jobs` | Print job tracking |
|
||||
| `/api/material-bases` | Material base types |
|
||||
| `/api/material-finishes` | Material finishes |
|
||||
| `/api/material-modifiers` | Material modifiers |
|
||||
| `/api/material-lookups` | Combined material lookup |
|
||||
| `/api/cost-analysis` | Cost-per-print and COGS |
|
||||
| `/api/qr` | QR code generation |
|
||||
|
||||
Full schema available at `/swagger` when running in dev.
|
||||
|
||||
---
|
||||
|
||||
## CI
|
||||
|
||||
Gitea Actions pipeline (`.gitea/workflows/dev.yml`) runs on every push to `dev`:
|
||||
|
||||
- `dotnet build`
|
||||
- Frontend `ng build`
|
||||
|
||||
---
|
||||
|
||||
## Branch & PR Rules
|
||||
|
||||
- All feature branches target `dev` — **never `main`**
|
||||
- Branch naming: `agent/<agent>/CUB-N-short-description`
|
||||
- PR titles: `CUB-N: short description`
|
||||
- PRs require Otto review before Joshua merges
|
||||
|
||||
---
|
||||
|
||||
*Built by CubeCraft Creations · Orchestrated by Otto*
|
||||
|
||||
27
backend/.dockerignore
Normal file
27
backend/.dockerignore
Normal file
@@ -0,0 +1,27 @@
|
||||
# Build artifacts
|
||||
bin/
|
||||
obj/
|
||||
|
||||
# IDE / editor
|
||||
.vs/
|
||||
.vscode/
|
||||
*.user
|
||||
*.suo
|
||||
.idea/
|
||||
|
||||
# Environment & secrets
|
||||
appsettings.Development.json
|
||||
.env
|
||||
.env.*
|
||||
|
||||
# Docker
|
||||
Dockerfile
|
||||
.dockerignore
|
||||
|
||||
# OS
|
||||
.DS_Store
|
||||
Thumbs.db
|
||||
|
||||
# Misc
|
||||
*.md
|
||||
*.log
|
||||
108
backend/API/Controllers/CostAnalysisController.cs
Normal file
108
backend/API/Controllers/CostAnalysisController.cs
Normal file
@@ -0,0 +1,108 @@
|
||||
using Extrudex.API.DTOs.PrintJobs;
|
||||
using Extrudex.Domain.Interfaces;
|
||||
using Microsoft.AspNetCore.Mvc;
|
||||
|
||||
namespace Extrudex.API.Controllers;
|
||||
|
||||
/// <summary>
|
||||
/// Controller for cost analysis endpoints. Provides spool-level
|
||||
/// cost breakdowns and aggregated COGS reporting.
|
||||
/// </summary>
|
||||
[ApiController]
|
||||
[Route("api/cost-analysis")]
|
||||
public class CostAnalysisController : ControllerBase
|
||||
{
|
||||
private readonly ICostPerPrintService _costService;
|
||||
private readonly ILogger<CostAnalysisController> _logger;
|
||||
|
||||
/// <summary>
|
||||
/// Initializes a new instance of the <see cref="CostAnalysisController"/> class.
|
||||
/// </summary>
|
||||
/// <param name="costService">The cost-per-print calculation service.</param>
|
||||
/// <param name="logger">The logger for diagnostic output.</param>
|
||||
public CostAnalysisController(
|
||||
ICostPerPrintService costService,
|
||||
ILogger<CostAnalysisController> logger)
|
||||
{
|
||||
_costService = costService;
|
||||
_logger = logger;
|
||||
}
|
||||
|
||||
// ── POST /api/cost-analysis/spool ────────────────────────────
|
||||
|
||||
/// <summary>
|
||||
/// Calculates cost breakdowns for all print jobs associated with a specific spool.
|
||||
/// Returns per-job costs plus an aggregated total. Jobs with missing cost data
|
||||
/// include warnings and null cost fields — the endpoint never throws for missing data.
|
||||
/// </summary>
|
||||
/// <param name="request">The request containing the spool identifier.</param>
|
||||
/// <returns>A spool-level cost summary with per-job breakdowns.</returns>
|
||||
/// <response code="200">Returns the spool cost breakdown with per-job details.</response>
|
||||
/// <response code="404">If the spool has no print jobs.</response>
|
||||
[HttpPost("spool")]
|
||||
[ProducesResponseType(typeof(SpoolCostResponse), StatusCodes.Status200OK)]
|
||||
[ProducesResponseType(StatusCodes.Status404NotFound)]
|
||||
public async Task<ActionResult<SpoolCostResponse>> CalculateSpoolCost([FromBody] SpoolCostRequest request)
|
||||
{
|
||||
_logger.LogDebug("Calculating cost breakdown for spool {SpoolId}", request.SpoolId);
|
||||
|
||||
var results = await _costService.CalculateBySpoolAsync(request.SpoolId);
|
||||
|
||||
if (results.Count == 0)
|
||||
{
|
||||
return NotFound(new { error = $"No print jobs found for spool with ID '{request.SpoolId}'." });
|
||||
}
|
||||
|
||||
// Build the spool-level summary
|
||||
var firstResult = results[0];
|
||||
var jobResponses = results.Select(MapCostToResponse).ToList();
|
||||
|
||||
// Aggregate total cost and grams — only include jobs that have a valid cost
|
||||
var calculableJobs = results.Where(r => r.CostPerPrint.HasValue).ToList();
|
||||
var totalCost = calculableJobs.Count == results.Count
|
||||
? Math.Round(calculableJobs.Sum(r => r.CostPerPrint!.Value), 4)
|
||||
: (decimal?)null;
|
||||
|
||||
var aggregateWarnings = new List<string>();
|
||||
if (calculableJobs.Count < results.Count)
|
||||
{
|
||||
aggregateWarnings.Add(
|
||||
$"{results.Count - calculableJobs.Count} of {results.Count} print jobs have missing cost data. " +
|
||||
"Total cost reflects only jobs with complete data.");
|
||||
}
|
||||
|
||||
var response = new SpoolCostResponse
|
||||
{
|
||||
SpoolId = request.SpoolId,
|
||||
SpoolSerial = firstResult.SpoolSerial,
|
||||
PurchasePrice = firstResult.PurchasePrice,
|
||||
WeightTotalGrams = firstResult.WeightTotalGrams,
|
||||
CostPerGram = firstResult.CostPerGram,
|
||||
TotalGramsConsumed = results.Sum(r => r.GramsDerived),
|
||||
TotalCost = totalCost,
|
||||
JobCount = results.Count,
|
||||
Jobs = jobResponses,
|
||||
Warnings = aggregateWarnings
|
||||
};
|
||||
|
||||
return Ok(response);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Maps a domain CostPerPrintResult to an API CostPerPrintResponse DTO.
|
||||
/// </summary>
|
||||
private static CostPerPrintResponse MapCostToResponse(CostPerPrintResult r) => new()
|
||||
{
|
||||
PrintJobId = r.PrintJobId,
|
||||
PrintName = r.PrintName,
|
||||
SpoolId = r.SpoolId,
|
||||
SpoolSerial = r.SpoolSerial,
|
||||
MmExtruded = r.MmExtruded,
|
||||
GramsDerived = r.GramsDerived,
|
||||
PurchasePrice = r.PurchasePrice,
|
||||
WeightTotalGrams = r.WeightTotalGrams,
|
||||
CostPerGram = r.CostPerGram,
|
||||
CostPerPrint = r.CostPerPrint,
|
||||
Warnings = r.Warnings
|
||||
};
|
||||
}
|
||||
@@ -1,6 +1,7 @@
|
||||
using Extrudex.API.DTOs;
|
||||
using Extrudex.API.DTOs.Filaments;
|
||||
using Extrudex.Domain.Entities;
|
||||
using Extrudex.Domain.Interfaces;
|
||||
using Extrudex.Infrastructure.Data;
|
||||
using Microsoft.AspNetCore.Mvc;
|
||||
using Microsoft.EntityFrameworkCore;
|
||||
@@ -17,16 +18,22 @@ namespace Extrudex.API.Controllers;
|
||||
public class FilamentsController : ControllerBase
|
||||
{
|
||||
private readonly ExtrudexDbContext _dbContext;
|
||||
private readonly ILowStockDetector _lowStockDetector;
|
||||
private readonly ILogger<FilamentsController> _logger;
|
||||
|
||||
/// <summary>
|
||||
/// Initializes a new instance of the <see cref="FilamentsController"/> class.
|
||||
/// </summary>
|
||||
/// <param name="dbContext">The database context for data access.</param>
|
||||
/// <param name="lowStockDetector">The low-stock detection service for filament alerts.</param>
|
||||
/// <param name="logger">The logger for diagnostic output.</param>
|
||||
public FilamentsController(ExtrudexDbContext dbContext, ILogger<FilamentsController> logger)
|
||||
public FilamentsController(
|
||||
ExtrudexDbContext dbContext,
|
||||
ILowStockDetector lowStockDetector,
|
||||
ILogger<FilamentsController> logger)
|
||||
{
|
||||
_dbContext = dbContext;
|
||||
_lowStockDetector = lowStockDetector;
|
||||
_logger = logger;
|
||||
}
|
||||
|
||||
@@ -40,15 +47,18 @@ public class FilamentsController : ControllerBase
|
||||
/// <response code="200">Returns the paginated list of filament spools.</response>
|
||||
[HttpGet]
|
||||
[ProducesResponseType(typeof(PagedResponse<FilamentResponse>), StatusCodes.Status200OK)]
|
||||
[ProducesResponseType(StatusCodes.Status400BadRequest)]
|
||||
public async Task<ActionResult<PagedResponse<FilamentResponse>>> GetFilaments(
|
||||
[FromQuery] FilamentQueryParameters query)
|
||||
{
|
||||
_logger.LogDebug(
|
||||
"Getting filaments: pageNumber={PageNumber}, pageSize={PageSize}, " +
|
||||
"materialBaseId={MaterialBaseId}, materialFinishId={MaterialFinishId}, " +
|
||||
"materialModifierId={MaterialModifierId}, brand={Brand}, isActive={IsActive}",
|
||||
"materialModifierId={MaterialModifierId}, brand={Brand}, isActive={IsActive}, " +
|
||||
"includeArchived={IncludeArchived}, storageLocation={StorageLocation}",
|
||||
query.PageNumber, query.PageSize, query.MaterialBaseId,
|
||||
query.MaterialFinishId, query.MaterialModifierId, query.Brand, query.IsActive);
|
||||
query.MaterialFinishId, query.MaterialModifierId, query.Brand, query.IsActive,
|
||||
query.IncludeArchived, query.StorageLocation);
|
||||
|
||||
// Clamp pagination values
|
||||
var pageNumber = Math.Max(1, query.PageNumber);
|
||||
@@ -77,13 +87,22 @@ public class FilamentsController : ControllerBase
|
||||
if (query.IsActive.HasValue)
|
||||
spoolQuery = spoolQuery.Where(s => s.IsActive == query.IsActive.Value);
|
||||
|
||||
// Exclude archived spools by default; include when explicitly requested
|
||||
if (query.IncludeArchived != true)
|
||||
spoolQuery = spoolQuery.Where(s => !s.IsArchived);
|
||||
|
||||
if (!string.IsNullOrWhiteSpace(query.StorageLocation))
|
||||
spoolQuery = spoolQuery.Where(s =>
|
||||
s.StorageLocation != null &&
|
||||
s.StorageLocation.ToLower().Contains(query.StorageLocation.ToLower()));
|
||||
|
||||
var totalCount = await spoolQuery.CountAsync();
|
||||
|
||||
var items = await spoolQuery
|
||||
.OrderByDescending(s => s.CreatedAt)
|
||||
.Skip((pageNumber - 1) * pageSize)
|
||||
.Take(pageSize)
|
||||
.Select(s => MapToFilamentResponse(s))
|
||||
.Select(s => MapToFilamentResponse(s, _lowStockDetector))
|
||||
.ToListAsync();
|
||||
|
||||
var response = new PagedResponse<FilamentResponse>
|
||||
@@ -124,7 +143,7 @@ public class FilamentsController : ControllerBase
|
||||
return NotFound(new { error = $"Filament with ID '{id}' not found." });
|
||||
}
|
||||
|
||||
return Ok(MapToFilamentResponse(spool));
|
||||
return Ok(MapToFilamentResponse(spool, _lowStockDetector));
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
@@ -185,7 +204,9 @@ public class FilamentsController : ControllerBase
|
||||
SpoolSerial = request.SpoolSerial,
|
||||
PurchasePrice = request.PurchasePrice,
|
||||
PurchaseDate = request.PurchaseDate,
|
||||
IsActive = request.IsActive
|
||||
IsActive = request.IsActive,
|
||||
IsArchived = request.IsArchived,
|
||||
StorageLocation = request.StorageLocation
|
||||
};
|
||||
|
||||
_dbContext.Spools.Add(entity);
|
||||
@@ -197,7 +218,7 @@ public class FilamentsController : ControllerBase
|
||||
if (entity.MaterialModifierId.HasValue)
|
||||
await _dbContext.Entry(entity).Reference(s => s.MaterialModifier).LoadAsync();
|
||||
|
||||
var response = MapToFilamentResponse(entity);
|
||||
var response = MapToFilamentResponse(entity, _lowStockDetector);
|
||||
return CreatedAtAction(nameof(GetFilament), new { id = entity.Id }, response);
|
||||
}
|
||||
|
||||
@@ -267,6 +288,8 @@ public class FilamentsController : ControllerBase
|
||||
entity.PurchasePrice = request.PurchasePrice;
|
||||
entity.PurchaseDate = request.PurchaseDate;
|
||||
entity.IsActive = request.IsActive;
|
||||
entity.IsArchived = request.IsArchived;
|
||||
entity.StorageLocation = request.StorageLocation;
|
||||
|
||||
await _dbContext.SaveChangesAsync();
|
||||
|
||||
@@ -276,7 +299,97 @@ public class FilamentsController : ControllerBase
|
||||
if (entity.MaterialModifierId.HasValue)
|
||||
await _dbContext.Entry(entity).Reference(s => s.MaterialModifier).LoadAsync();
|
||||
|
||||
return Ok(MapToFilamentResponse(entity));
|
||||
return Ok(MapToFilamentResponse(entity, _lowStockDetector));
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Gets only the filament spools that are flagged as low stock.
|
||||
/// A spool is considered low stock when its remaining weight percentage
|
||||
/// is at or below the configured threshold.
|
||||
/// </summary>
|
||||
/// <returns>A list of low-stock filament spools with alert metadata.</returns>
|
||||
/// <response code="200">Returns the list of low-stock filament spools.</response>
|
||||
[HttpGet("low-stock")]
|
||||
[ProducesResponseType(typeof(List<FilamentResponse>), StatusCodes.Status200OK)]
|
||||
public async Task<ActionResult<List<FilamentResponse>>> GetLowStockFilaments()
|
||||
{
|
||||
_logger.LogDebug("Getting low-stock filaments (threshold: {Threshold}%)",
|
||||
_lowStockDetector.LowStockThresholdPercent);
|
||||
|
||||
var spools = await _dbContext.Spools
|
||||
.Include(s => s.MaterialBase)
|
||||
.Include(s => s.MaterialFinish)
|
||||
.Include(s => s.MaterialModifier)
|
||||
.Where(s => s.IsActive)
|
||||
.OrderByDescending(s => s.CreatedAt)
|
||||
.ToListAsync();
|
||||
|
||||
var lowStockItems = spools
|
||||
.Where(s => _lowStockDetector.IsLowStock(s.WeightRemainingGrams, s.WeightTotalGrams))
|
||||
.Select(s => MapToFilamentResponse(s, _lowStockDetector))
|
||||
.ToList();
|
||||
|
||||
return Ok(lowStockItems);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Deletes a filament spool by its unique identifier.
|
||||
/// If the spool has associated print jobs, the deletion is rejected with a 409 Conflict
|
||||
/// to preserve COGS and print history — the caller should archive the spool instead.
|
||||
/// Associated filament usage records are removed before the spool is deleted.
|
||||
/// AMS slots referencing this spool will have their SpoolId set to null by the database.
|
||||
/// </summary>
|
||||
/// <param name="id">The unique identifier of the filament spool to delete.</param>
|
||||
/// <returns>No content on successful deletion.</returns>
|
||||
/// <response code="204">The filament spool was successfully deleted.</response>
|
||||
/// <response code="404">If the filament spool with the given ID is not found.</response>
|
||||
/// <response code="409">If the spool has associated print jobs and cannot be deleted.</response>
|
||||
[HttpDelete("{id:guid}")]
|
||||
[ProducesResponseType(StatusCodes.Status204NoContent)]
|
||||
[ProducesResponseType(StatusCodes.Status404NotFound)]
|
||||
[ProducesResponseType(StatusCodes.Status409Conflict)]
|
||||
public async Task<IActionResult> DeleteFilament(Guid id)
|
||||
{
|
||||
_logger.LogInformation("Deleting filament {Id}", id);
|
||||
|
||||
var entity = await _dbContext.Spools.FindAsync(id);
|
||||
if (entity is null)
|
||||
{
|
||||
_logger.LogWarning("Filament {Id} not found for deletion", id);
|
||||
return NotFound(new { error = $"Filament with ID '{id}' not found." });
|
||||
}
|
||||
|
||||
// Check for associated print jobs — these cannot be orphaned
|
||||
var hasPrintJobs = await _dbContext.PrintJobs.AnyAsync(pj => pj.SpoolId == id);
|
||||
if (hasPrintJobs)
|
||||
{
|
||||
_logger.LogWarning(
|
||||
"Cannot delete filament {Id}: associated print jobs exist. Suggest archiving instead.", id);
|
||||
return Conflict(new
|
||||
{
|
||||
error = $"Cannot delete filament '{id}' because it has associated print jobs. " +
|
||||
"Archive the filament instead to preserve print history and COGS data."
|
||||
});
|
||||
}
|
||||
|
||||
// Remove associated filament usage records (usage tracking data for this spool)
|
||||
var usageRecords = await _dbContext.FilamentUsages
|
||||
.Where(fu => fu.SpoolId == id)
|
||||
.ToListAsync();
|
||||
|
||||
if (usageRecords.Count > 0)
|
||||
{
|
||||
_logger.LogInformation(
|
||||
"Removing {Count} filament usage records for spool {Id}",
|
||||
usageRecords.Count, id);
|
||||
_dbContext.FilamentUsages.RemoveRange(usageRecords);
|
||||
}
|
||||
|
||||
_dbContext.Spools.Remove(entity);
|
||||
await _dbContext.SaveChangesAsync();
|
||||
|
||||
_logger.LogInformation("Filament {Id} deleted successfully", id);
|
||||
return NoContent();
|
||||
}
|
||||
|
||||
// ── Mapping helper ─────────────────────────────────────────
|
||||
@@ -285,10 +398,12 @@ public class FilamentsController : ControllerBase
|
||||
/// Maps a Spool domain entity to a FilamentResponse DTO.
|
||||
/// Denormalizes material names for display convenience.
|
||||
/// Populates the QrCodeUrl for easy frontend access to the spool's QR code.
|
||||
/// Calculates low-stock status and remaining weight percentage.
|
||||
/// </summary>
|
||||
/// <param name="s">The spool entity to map.</param>
|
||||
/// <returns>A FilamentResponse DTO with denormalized material names and QR code URL.</returns>
|
||||
private static FilamentResponse MapToFilamentResponse(Spool s) => new()
|
||||
/// <param name="lowStockDetector">The low-stock detection service for computing alert flags.</param>
|
||||
/// <returns>A FilamentResponse DTO with denormalized material names, QR code URL, and low-stock metadata.</returns>
|
||||
private static FilamentResponse MapToFilamentResponse(Spool s, ILowStockDetector lowStockDetector) => new()
|
||||
{
|
||||
Id = s.Id,
|
||||
MaterialBaseId = s.MaterialBaseId,
|
||||
@@ -307,8 +422,12 @@ public class FilamentsController : ControllerBase
|
||||
PurchasePrice = s.PurchasePrice,
|
||||
PurchaseDate = s.PurchaseDate,
|
||||
IsActive = s.IsActive,
|
||||
IsArchived = s.IsArchived,
|
||||
StorageLocation = s.StorageLocation,
|
||||
CreatedAt = s.CreatedAt,
|
||||
UpdatedAt = s.UpdatedAt,
|
||||
QrCodeUrl = $"/api/qr/spool/{s.Id}"
|
||||
QrCodeUrl = $"/api/qr/spool/{s.Id}",
|
||||
IsLowStock = lowStockDetector.IsLowStock(s.WeightRemainingGrams, s.WeightTotalGrams),
|
||||
RemainingWeightPercent = lowStockDetector.GetRemainingWeightPercent(s.WeightRemainingGrams, s.WeightTotalGrams)
|
||||
};
|
||||
}
|
||||
@@ -413,6 +413,92 @@ public class PrintJobsController : ControllerBase
|
||||
return NoContent();
|
||||
}
|
||||
|
||||
// ── GET /api/printjobs/{id}/cost-summary ──────────────────────────
|
||||
|
||||
/// <summary>
|
||||
/// Gets the material cost summary for a specific print job.
|
||||
/// Calculates total material cost from filament usage (grams derived)
|
||||
/// and the spool's purchase price. Returns warnings instead of errors
|
||||
/// when cost data is unavailable.
|
||||
/// </summary>
|
||||
/// <param name="id">The unique identifier of the print job.</param>
|
||||
/// <returns>A cost summary with breakdown and any warnings about missing data.</returns>
|
||||
/// <response code="200">Returns the cost summary. Warnings field lists any missing data.</response>
|
||||
/// <response code="404">If the print job with the given ID is not found.</response>
|
||||
[HttpGet("{id:guid}/cost-summary")]
|
||||
[ProducesResponseType(typeof(CostSummaryResponse), StatusCodes.Status200OK)]
|
||||
[ProducesResponseType(StatusCodes.Status404NotFound)]
|
||||
public async Task<ActionResult<CostSummaryResponse>> GetCostSummary(Guid id)
|
||||
{
|
||||
_logger.LogDebug("Getting cost summary for print job {Id}", id);
|
||||
|
||||
var job = await _dbContext.PrintJobs
|
||||
.Include(j => j.Spool)
|
||||
.ThenInclude(s => s!.MaterialBase)
|
||||
.FirstOrDefaultAsync(j => j.Id == id);
|
||||
|
||||
if (job is null)
|
||||
{
|
||||
_logger.LogWarning("Print job {Id} not found for cost summary", id);
|
||||
return NotFound(new { error = $"Print job with ID '{id}' not found." });
|
||||
}
|
||||
|
||||
var warnings = new List<string>();
|
||||
var spool = job.Spool;
|
||||
|
||||
// Build response with what we have
|
||||
var response = new CostSummaryResponse
|
||||
{
|
||||
PrintJobId = job.Id,
|
||||
PrintName = job.PrintName,
|
||||
SpoolId = job.SpoolId,
|
||||
SpoolSerial = spool?.SpoolSerial ?? string.Empty,
|
||||
SpoolBrand = spool?.Brand ?? string.Empty,
|
||||
SpoolColorName = spool?.ColorName ?? string.Empty,
|
||||
MmExtruded = job.MmExtruded,
|
||||
GramsDerived = job.GramsDerived,
|
||||
SpoolPurchasePrice = spool?.PurchasePrice,
|
||||
SpoolWeightTotalGrams = spool?.WeightTotalGrams,
|
||||
StoredCostPerPrint = job.CostPerPrint
|
||||
};
|
||||
|
||||
// Validate spool data availability
|
||||
if (spool is null)
|
||||
{
|
||||
warnings.Add("Spool data is not available for this print job. Cost cannot be calculated.");
|
||||
response.Warnings = warnings;
|
||||
return Ok(response);
|
||||
}
|
||||
|
||||
// Check if we can calculate cost
|
||||
if (!spool.PurchasePrice.HasValue)
|
||||
{
|
||||
warnings.Add("Spool purchase price is not set. Cost per gram and total material cost cannot be calculated.");
|
||||
}
|
||||
|
||||
if (spool.WeightTotalGrams <= 0)
|
||||
{
|
||||
warnings.Add("Spool total weight is zero or invalid. Cost per gram and total material cost cannot be calculated.");
|
||||
}
|
||||
|
||||
// If we have enough data, calculate the cost
|
||||
if (spool.PurchasePrice.HasValue && spool.WeightTotalGrams > 0)
|
||||
{
|
||||
var pricePerGram = spool.PurchasePrice.Value / spool.WeightTotalGrams;
|
||||
response.PricePerGram = Math.Round(pricePerGram, 4);
|
||||
response.TotalMaterialCost = Math.Round(job.GramsDerived * pricePerGram, 4);
|
||||
}
|
||||
|
||||
// Warn if grams derived is zero but mm extruded is non-zero
|
||||
if (job.GramsDerived == 0 && job.MmExtruded > 0)
|
||||
{
|
||||
warnings.Add("GramsDerived is zero despite MmExtruded being non-zero. Cost may be inaccurate. Consider re-deriving grams from filament parameters.");
|
||||
}
|
||||
|
||||
response.Warnings = warnings;
|
||||
return Ok(response);
|
||||
}
|
||||
|
||||
// ── Gram Derivation Formula ────────────────────────────────────
|
||||
|
||||
/// <summary>
|
||||
|
||||
117
backend/API/Controllers/UsageLogsController.cs
Normal file
117
backend/API/Controllers/UsageLogsController.cs
Normal file
@@ -0,0 +1,117 @@
|
||||
using Extrudex.API.DTOs.UsageLogs;
|
||||
using Extrudex.Domain.Enums;
|
||||
using Extrudex.Domain.Interfaces;
|
||||
using Microsoft.AspNetCore.Mvc;
|
||||
|
||||
namespace Extrudex.API.Controllers;
|
||||
|
||||
/// <summary>
|
||||
/// API controller for recording and querying filament usage logs.
|
||||
/// Usage logs provide a fine-grained audit trail of filament consumption
|
||||
/// from printer integrations or manual input.
|
||||
/// </summary>
|
||||
[ApiController]
|
||||
[Route("api/[controller]")]
|
||||
[Produces("application/json")]
|
||||
public class UsageLogsController : ControllerBase
|
||||
{
|
||||
private readonly IUsageLogService _usageLogService;
|
||||
|
||||
/// <summary>
|
||||
/// Initializes a new instance of the <see cref="UsageLogsController"/> class.
|
||||
/// </summary>
|
||||
/// <param name="usageLogService">The usage log service for recording and querying usage.</param>
|
||||
public UsageLogsController(IUsageLogService usageLogService)
|
||||
{
|
||||
_usageLogService = usageLogService;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Records a new filament usage entry.
|
||||
/// </summary>
|
||||
/// <param name="request">The usage entry details.</param>
|
||||
/// <returns>The created usage log entry.</returns>
|
||||
[HttpPost]
|
||||
[ProducesResponseType(typeof(UsageLogResponse), StatusCodes.Status201Created)]
|
||||
[ProducesResponseType(StatusCodes.Status400BadRequest)]
|
||||
public async Task<ActionResult<UsageLogResponse>> Create([FromBody] CreateUsageLogRequest request)
|
||||
{
|
||||
if (!Enum.TryParse<DataSource>(request.DataSource, ignoreCase: true, out var dataSource))
|
||||
{
|
||||
return BadRequest($"Invalid data source: '{request.DataSource}'. Valid values: Mqtt, Moonraker, Manual.");
|
||||
}
|
||||
|
||||
var entry = await _usageLogService.RecordUsageAsync(
|
||||
spoolId: request.SpoolId,
|
||||
gramsUsed: request.GramsUsed,
|
||||
dataSource: dataSource,
|
||||
printerId: request.PrinterId,
|
||||
printJobId: request.PrintJobId,
|
||||
mmExtruded: request.MmExtruded,
|
||||
usageTimestamp: request.UsageTimestamp,
|
||||
notes: request.Notes
|
||||
);
|
||||
|
||||
return CreatedAtAction(
|
||||
nameof(GetBySpool),
|
||||
new { spoolId = entry.SpoolId },
|
||||
MapToResponse(entry));
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Gets usage logs for a specific spool, ordered by most recent first.
|
||||
/// </summary>
|
||||
/// <param name="spoolId">The spool ID to filter by.</param>
|
||||
/// <returns>A collection of usage log entries for the spool.</returns>
|
||||
[HttpGet("spool/{spoolId:guid}")]
|
||||
[ProducesResponseType(typeof(IEnumerable<UsageLogResponse>), StatusCodes.Status200OK)]
|
||||
public async Task<ActionResult<IEnumerable<UsageLogResponse>>> GetBySpool(Guid spoolId)
|
||||
{
|
||||
var logs = await _usageLogService.GetBySpoolAsync(spoolId);
|
||||
return Ok(logs.Select(MapToResponse));
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Gets usage logs for a specific printer, ordered by most recent first.
|
||||
/// </summary>
|
||||
/// <param name="printerId">The printer ID to filter by.</param>
|
||||
/// <returns>A collection of usage log entries for the printer.</returns>
|
||||
[HttpGet("printer/{printerId:guid}")]
|
||||
[ProducesResponseType(typeof(IEnumerable<UsageLogResponse>), StatusCodes.Status200OK)]
|
||||
public async Task<ActionResult<IEnumerable<UsageLogResponse>>> GetByPrinter(Guid printerId)
|
||||
{
|
||||
var logs = await _usageLogService.GetByPrinterAsync(printerId);
|
||||
return Ok(logs.Select(MapToResponse));
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Gets usage logs for a specific print job, ordered by most recent first.
|
||||
/// </summary>
|
||||
/// <param name="printJobId">The print job ID to filter by.</param>
|
||||
/// <returns>A collection of usage log entries for the print job.</returns>
|
||||
[HttpGet("print-job/{printJobId:guid}")]
|
||||
[ProducesResponseType(typeof(IEnumerable<UsageLogResponse>), StatusCodes.Status200OK)]
|
||||
public async Task<ActionResult<IEnumerable<UsageLogResponse>>> GetByPrintJob(Guid printJobId)
|
||||
{
|
||||
var logs = await _usageLogService.GetByPrintJobAsync(printJobId);
|
||||
return Ok(logs.Select(MapToResponse));
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Maps a UsageLog domain entity to a UsageLogResponse DTO.
|
||||
/// </summary>
|
||||
private static UsageLogResponse MapToResponse(Domain.Entities.UsageLog log) => new()
|
||||
{
|
||||
Id = log.Id,
|
||||
SpoolId = log.SpoolId,
|
||||
PrinterId = log.PrinterId,
|
||||
PrintJobId = log.PrintJobId,
|
||||
GramsUsed = log.GramsUsed,
|
||||
MmExtruded = log.MmExtruded,
|
||||
UsageTimestamp = log.UsageTimestamp,
|
||||
DataSource = log.DataSource.ToString(),
|
||||
Notes = log.Notes,
|
||||
CreatedAt = log.CreatedAt,
|
||||
UpdatedAt = log.UpdatedAt
|
||||
};
|
||||
}
|
||||
@@ -59,6 +59,12 @@ public class FilamentResponse
|
||||
/// <summary>Whether the spool is currently active and available.</summary>
|
||||
public bool IsActive { get; set; }
|
||||
|
||||
/// <summary>Whether the spool has been archived (removed from active inventory).</summary>
|
||||
public bool IsArchived { get; set; }
|
||||
|
||||
/// <summary>Physical storage location (e.g., "Shelf A", "Drawer 3"). Null if unset.</summary>
|
||||
public string? StorageLocation { get; set; }
|
||||
|
||||
/// <summary>Timestamp when this record was created (UTC).</summary>
|
||||
public DateTime CreatedAt { get; set; }
|
||||
|
||||
@@ -70,6 +76,19 @@ public class FilamentResponse
|
||||
/// Encodes a deep link to the spool's detail page.
|
||||
/// </summary>
|
||||
public string QrCodeUrl { get; set; } = string.Empty;
|
||||
|
||||
/// <summary>
|
||||
/// Whether this spool is flagged as low stock — remaining weight is at or
|
||||
/// below the configured low-stock threshold percentage.
|
||||
/// Useful for UI alerts and inventory dashboards.
|
||||
/// </summary>
|
||||
public bool IsLowStock { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Remaining filament weight as a percentage of total weight (0–100).
|
||||
/// Rounded to one decimal place. Returns 0 if total weight is zero.
|
||||
/// </summary>
|
||||
public decimal RemainingWeightPercent { get; set; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
@@ -133,6 +152,15 @@ public class CreateFilamentRequest
|
||||
|
||||
/// <summary>Whether the spool is active. Defaults to true.</summary>
|
||||
public bool IsActive { get; set; } = true;
|
||||
|
||||
/// <summary>Whether the spool is archived. Defaults to false.
|
||||
/// </summary>
|
||||
public bool IsArchived { get; set; } = false;
|
||||
|
||||
/// <summary>Physical storage location (e.g., "Shelf A", "Drawer 3"). Optional.
|
||||
/// </summary>
|
||||
[StringLength(200, ErrorMessage = "StorageLocation must not exceed 200 characters.")]
|
||||
public string? StorageLocation { get; set; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
@@ -196,4 +224,11 @@ public class UpdateFilamentRequest
|
||||
|
||||
/// <summary>Whether the spool is active.</summary>
|
||||
public bool IsActive { get; set; } = true;
|
||||
|
||||
/// <summary>Whether the spool is archived. Defaults to false.</summary>
|
||||
public bool IsArchived { get; set; } = false;
|
||||
|
||||
/// <summary>Physical storage location (e.g., "Shelf A", "Drawer 3"). Optional.</summary>
|
||||
[StringLength(200, ErrorMessage = "StorageLocation must not exceed 200 characters.")]
|
||||
public string? StorageLocation { get; set; }
|
||||
}
|
||||
@@ -30,4 +30,11 @@ public class FilamentQueryParameters
|
||||
|
||||
/// <summary>Optional filter by active status. True = active only, False = inactive only.</summary>
|
||||
public bool? IsActive { get; set; }
|
||||
|
||||
/// <summary>Whether to include archived spools in results. Defaults to false (excludes archived).
|
||||
/// </summary>
|
||||
public bool? IncludeArchived { get; set; }
|
||||
|
||||
/// <summary>Optional filter by storage location (case-insensitive partial match).</summary>
|
||||
public string? StorageLocation { get; set; }
|
||||
}
|
||||
99
backend/API/DTOs/PrintJobs/CostPerPrintDtos.cs
Normal file
99
backend/API/DTOs/PrintJobs/CostPerPrintDtos.cs
Normal file
@@ -0,0 +1,99 @@
|
||||
using System.ComponentModel.DataAnnotations;
|
||||
|
||||
namespace Extrudex.API.DTOs.PrintJobs;
|
||||
|
||||
/// <summary>
|
||||
/// Response DTO for cost-per-print calculation. Contains the full cost
|
||||
/// breakdown and any warnings about missing or incomplete data.
|
||||
/// </summary>
|
||||
public class CostPerPrintResponse
|
||||
{
|
||||
/// <summary>The print job identifier this result belongs to.</summary>
|
||||
public Guid PrintJobId { get; set; }
|
||||
|
||||
/// <summary>Human-readable name of the print job.</summary>
|
||||
public string PrintName { get; set; } = string.Empty;
|
||||
|
||||
/// <summary>The spool identifier that provided filament.</summary>
|
||||
public Guid SpoolId { get; set; }
|
||||
|
||||
/// <summary>Serial number of the spool.</summary>
|
||||
public string SpoolSerial { get; set; } = string.Empty;
|
||||
|
||||
/// <summary>Total millimeters of filament extruded.</summary>
|
||||
public decimal MmExtruded { get; set; }
|
||||
|
||||
/// <summary>Derived grams consumed for this print.</summary>
|
||||
public decimal GramsDerived { get; set; }
|
||||
|
||||
/// <summary>The spool's purchase price. Null if not recorded.</summary>
|
||||
public decimal? PurchasePrice { get; set; }
|
||||
|
||||
/// <summary>The spool's total weight in grams when full.</summary>
|
||||
public decimal? WeightTotalGrams { get; set; }
|
||||
|
||||
/// <summary>Cost per gram of filament. Null if purchase price or total weight is missing.</summary>
|
||||
public decimal? CostPerGram { get; set; }
|
||||
|
||||
/// <summary>Calculated cost of this print job. Null if cost data is incomplete.</summary>
|
||||
public decimal? CostPerPrint { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Warnings about missing or incomplete data. Empty when all data is available
|
||||
/// and the calculation succeeded.
|
||||
/// </summary>
|
||||
public List<string> Warnings { get; set; } = new();
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Request DTO for batch cost calculation by spool. Returns cost breakdowns
|
||||
/// for all print jobs associated with the specified spool.
|
||||
/// </summary>
|
||||
public class SpoolCostRequest
|
||||
{
|
||||
/// <summary>The unique identifier of the spool to calculate costs for.</summary>
|
||||
[Required(ErrorMessage = "SpoolId is required.")]
|
||||
public Guid SpoolId { get; set; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Response DTO for spool-level cost calculation. Contains cost breakdowns
|
||||
/// for all print jobs on the spool, plus a total cost summary.
|
||||
/// </summary>
|
||||
public class SpoolCostResponse
|
||||
{
|
||||
/// <summary>The spool identifier.</summary>
|
||||
public Guid SpoolId { get; set; }
|
||||
|
||||
/// <summary>Serial number of the spool.</summary>
|
||||
public string SpoolSerial { get; set; } = string.Empty;
|
||||
|
||||
/// <summary>The spool's purchase price. Null if not recorded.</summary>
|
||||
public decimal? PurchasePrice { get; set; }
|
||||
|
||||
/// <summary>The spool's total weight in grams when full.</summary>
|
||||
public decimal? WeightTotalGrams { get; set; }
|
||||
|
||||
/// <summary>Cost per gram of filament. Null if cost data is incomplete.</summary>
|
||||
public decimal? CostPerGram { get; set; }
|
||||
|
||||
/// <summary>Total grams consumed across all print jobs on this spool.</summary>
|
||||
public decimal TotalGramsConsumed { get; set; }
|
||||
|
||||
/// <summary>Total calculated cost across all print jobs. Null if any job has missing data.</summary>
|
||||
public decimal? TotalCost { get; set; }
|
||||
|
||||
/// <summary>Number of print jobs included in this calculation.</summary>
|
||||
public int JobCount { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Individual cost breakdowns per print job. Jobs with missing data
|
||||
/// will have null cost fields and populated warnings.
|
||||
/// </summary>
|
||||
public List<CostPerPrintResponse> Jobs { get; set; } = new();
|
||||
|
||||
/// <summary>
|
||||
/// Aggregate warnings about missing data across all jobs.
|
||||
/// </summary>
|
||||
public List<string> Warnings { get; set; } = new();
|
||||
}
|
||||
55
backend/API/DTOs/PrintJobs/CostSummaryResponse.cs
Normal file
55
backend/API/DTOs/PrintJobs/CostSummaryResponse.cs
Normal file
@@ -0,0 +1,55 @@
|
||||
namespace Extrudex.API.DTOs.PrintJobs;
|
||||
|
||||
/// <summary>
|
||||
/// Response DTO for the cost summary of a print job.
|
||||
/// Provides a breakdown of material cost based on filament usage
|
||||
/// and spool pricing data. If cost data is incomplete, warnings
|
||||
/// are returned instead of throwing an error.
|
||||
/// </summary>
|
||||
public class CostSummaryResponse
|
||||
{
|
||||
/// <summary>Unique identifier of the print job.</summary>
|
||||
public Guid PrintJobId { get; set; }
|
||||
|
||||
/// <summary>Human-readable name of the print job.</summary>
|
||||
public string PrintName { get; set; } = string.Empty;
|
||||
|
||||
/// <summary>Foreign key to the spool used for this print job.</summary>
|
||||
public Guid SpoolId { get; set; }
|
||||
|
||||
/// <summary>Serial number of the spool.</summary>
|
||||
public string SpoolSerial { get; set; } = string.Empty;
|
||||
|
||||
/// <summary>Brand of the spool.</summary>
|
||||
public string SpoolBrand { get; set; } = string.Empty;
|
||||
|
||||
/// <summary>Color name of the spool.</summary>
|
||||
public string SpoolColorName { get; set; } = string.Empty;
|
||||
|
||||
/// <summary>Total millimeters of filament extruded during this print.</summary>
|
||||
public decimal MmExtruded { get; set; }
|
||||
|
||||
/// <summary>Derived grams consumed for this print job.</summary>
|
||||
public decimal GramsDerived { get; set; }
|
||||
|
||||
/// <summary>Purchase price of the full spool, if available.</summary>
|
||||
public decimal? SpoolPurchasePrice { get; set; }
|
||||
|
||||
/// <summary>Total weight of the spool in grams when full.</summary>
|
||||
public decimal? SpoolWeightTotalGrams { get; set; }
|
||||
|
||||
/// <summary>Calculated price per gram (purchase price / total weight), if available.</summary>
|
||||
public decimal? PricePerGram { get; set; }
|
||||
|
||||
/// <summary>Calculated total material cost for this print job, if available.</summary>
|
||||
public decimal? TotalMaterialCost { get; set; }
|
||||
|
||||
/// <summary>The CostPerPrint stored on the print job entity, if set.</summary>
|
||||
public decimal? StoredCostPerPrint { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Warnings about missing data that prevent cost calculation.
|
||||
/// Empty if all data is available and cost was calculated successfully.
|
||||
/// </summary>
|
||||
public List<string> Warnings { get; set; } = new();
|
||||
}
|
||||
115
backend/API/DTOs/UsageLogs/UsageLogDtos.cs
Normal file
115
backend/API/DTOs/UsageLogs/UsageLogDtos.cs
Normal file
@@ -0,0 +1,115 @@
|
||||
using System.ComponentModel.DataAnnotations;
|
||||
|
||||
namespace Extrudex.API.DTOs.UsageLogs;
|
||||
|
||||
/// <summary>
|
||||
/// Request DTO for recording a filament usage entry.
|
||||
/// </summary>
|
||||
public class CreateUsageLogRequest
|
||||
{
|
||||
/// <summary>
|
||||
/// The ID of the spool that provided the filament.
|
||||
/// </summary>
|
||||
[Required]
|
||||
public Guid SpoolId { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// The number of grams of filament consumed.
|
||||
/// </summary>
|
||||
[Required]
|
||||
[Range(0.01, double.MaxValue, ErrorMessage = "GramsUsed must be a positive value.")]
|
||||
public decimal GramsUsed { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// The source of the usage data (Mqtt, Moonraker, Manual).
|
||||
/// </summary>
|
||||
[Required]
|
||||
public string DataSource { get; set; } = string.Empty;
|
||||
|
||||
/// <summary>
|
||||
/// The ID of the printer that consumed the filament. Optional.
|
||||
/// </summary>
|
||||
public Guid? PrinterId { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// The ID of the print job associated with this usage. Optional.
|
||||
/// </summary>
|
||||
public Guid? PrintJobId { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// The number of millimeters of filament extruded. Optional.
|
||||
/// </summary>
|
||||
public decimal? MmExtruded { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// When the usage occurred (UTC). Defaults to now if not specified.
|
||||
/// </summary>
|
||||
public DateTime? UsageTimestamp { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Optional notes about this usage entry.
|
||||
/// </summary>
|
||||
[MaxLength(2000)]
|
||||
public string? Notes { get; set; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Response DTO for a usage log entry.
|
||||
/// </summary>
|
||||
public class UsageLogResponse
|
||||
{
|
||||
/// <summary>
|
||||
/// Unique identifier for the usage log entry.
|
||||
/// </summary>
|
||||
public Guid Id { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// The spool that provided the filament.
|
||||
/// </summary>
|
||||
public Guid SpoolId { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// The printer that consumed the filament, if applicable.
|
||||
/// </summary>
|
||||
public Guid? PrinterId { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// The print job associated with this usage, if applicable.
|
||||
/// </summary>
|
||||
public Guid? PrintJobId { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Grams of filament consumed.
|
||||
/// </summary>
|
||||
public decimal GramsUsed { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Millimeters of filament extruded, if available.
|
||||
/// </summary>
|
||||
public decimal? MmExtruded { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// When the usage occurred (UTC).
|
||||
/// </summary>
|
||||
public DateTime UsageTimestamp { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Source of the usage data (Mqtt, Moonraker, Manual).
|
||||
/// </summary>
|
||||
public string DataSource { get; set; } = string.Empty;
|
||||
|
||||
/// <summary>
|
||||
/// Optional notes about this usage entry.
|
||||
/// </summary>
|
||||
public string? Notes { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// When the record was created (UTC).
|
||||
/// </summary>
|
||||
public DateTime CreatedAt { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// When the record was last updated (UTC).
|
||||
/// </summary>
|
||||
public DateTime UpdatedAt { get; set; }
|
||||
}
|
||||
69
backend/API/Filters/FluentValidationFilter.cs
Normal file
69
backend/API/Filters/FluentValidationFilter.cs
Normal file
@@ -0,0 +1,69 @@
|
||||
using FluentValidation;
|
||||
using Microsoft.AspNetCore.Mvc;
|
||||
using Microsoft.AspNetCore.Mvc.Filters;
|
||||
|
||||
namespace Extrudex.API.Filters;
|
||||
|
||||
/// <summary>
|
||||
/// Action filter that automatically validates request DTOs using FluentValidation
|
||||
/// validators registered in DI. Runs before the controller action executes.
|
||||
/// Returns 400 Bad Request with validation errors if validation fails.
|
||||
/// </summary>
|
||||
public class FluentValidationFilter : IAsyncActionFilter
|
||||
{
|
||||
private readonly IServiceProvider _serviceProvider;
|
||||
private readonly ILogger<FluentValidationFilter> _logger;
|
||||
|
||||
public FluentValidationFilter(IServiceProvider serviceProvider, ILogger<FluentValidationFilter> logger)
|
||||
{
|
||||
_serviceProvider = serviceProvider;
|
||||
_logger = logger;
|
||||
}
|
||||
|
||||
public async Task OnActionExecutionAsync(ActionExecutingContext context, ActionExecutionDelegate next)
|
||||
{
|
||||
foreach (var argument in context.ActionArguments.Values)
|
||||
{
|
||||
if (argument is null) continue;
|
||||
|
||||
var argumentType = argument.GetType();
|
||||
var validatorType = typeof(IValidator<>).MakeGenericType(argumentType);
|
||||
|
||||
// Try to resolve a validator for this argument type
|
||||
var validator = _serviceProvider.GetService(validatorType) as IValidator;
|
||||
if (validator is null) continue;
|
||||
|
||||
_logger.LogDebug("Validating {Type} with {Validator}", argumentType.Name, validator.GetType().Name);
|
||||
|
||||
var validationResult = await validator.ValidateAsync(
|
||||
new ValidationContext<object>(argument), context.HttpContext.RequestAborted);
|
||||
|
||||
if (!validationResult.IsValid)
|
||||
{
|
||||
foreach (var error in validationResult.Errors)
|
||||
{
|
||||
context.ModelState.AddModelError(error.PropertyName, error.ErrorMessage);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (!context.ModelState.IsValid)
|
||||
{
|
||||
var errors = context.ModelState
|
||||
.Where(kvp => kvp.Value?.Errors.Count > 0)
|
||||
.ToDictionary(
|
||||
kvp => kvp.Key,
|
||||
kvp => kvp.Value!.Errors.Select(e => e.ErrorMessage).ToArray());
|
||||
|
||||
context.Result = new BadRequestObjectResult(new
|
||||
{
|
||||
title = "Validation failed",
|
||||
status = 400,
|
||||
errors
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
await next();
|
||||
}
|
||||
}
|
||||
79
backend/API/Jobs/FilamentUsageSyncJob.cs
Normal file
79
backend/API/Jobs/FilamentUsageSyncJob.cs
Normal file
@@ -0,0 +1,79 @@
|
||||
using Extrudex.Domain.Interfaces;
|
||||
using Extrudex.Infrastructure.Configuration;
|
||||
using Microsoft.Extensions.Hosting;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using Microsoft.Extensions.Options;
|
||||
|
||||
namespace Extrudex.API.Jobs;
|
||||
|
||||
/// <summary>
|
||||
/// Background job that periodically syncs filament usage data from
|
||||
/// Moonraker printers. Runs as a hosted service and polls all active
|
||||
/// Moonraker printers on a configurable interval to persist usage
|
||||
/// data to the Extrudex database.
|
||||
///
|
||||
/// Configuration is bound from the "FilamentUsageSync" section in
|
||||
/// appsettings.json. Set Enabled=false to disable without removing
|
||||
/// the service registration.
|
||||
/// </summary>
|
||||
public class FilamentUsageSyncJob : BackgroundService
|
||||
{
|
||||
private readonly IFilamentUsageSyncService _syncService;
|
||||
private readonly FilamentUsageSyncOptions _options;
|
||||
private readonly ILogger<FilamentUsageSyncJob> _logger;
|
||||
|
||||
/// <summary>
|
||||
/// Creates a new FilamentUsageSyncJob.
|
||||
/// </summary>
|
||||
/// <param name="syncService">The service that performs the actual sync logic.</param>
|
||||
/// <param name="options">Configuration options for polling interval and timeouts.</param>
|
||||
/// <param name="logger">Logger for diagnostic output.</param>
|
||||
public FilamentUsageSyncJob(
|
||||
IFilamentUsageSyncService syncService,
|
||||
IOptions<FilamentUsageSyncOptions> options,
|
||||
ILogger<FilamentUsageSyncJob> logger)
|
||||
{
|
||||
_syncService = syncService;
|
||||
_options = options.Value;
|
||||
_logger = logger;
|
||||
}
|
||||
|
||||
/// <inheritdoc />
|
||||
protected override async Task ExecuteAsync(CancellationToken stoppingToken)
|
||||
{
|
||||
if (!_options.Enabled)
|
||||
{
|
||||
_logger.LogInformation("Filament usage sync job is disabled via configuration — exiting");
|
||||
return;
|
||||
}
|
||||
|
||||
_logger.LogInformation(
|
||||
"Filament usage sync job starting — polling every {Interval}",
|
||||
_options.PollingInterval);
|
||||
|
||||
// Delay briefly on startup to allow the web host to fully initialize
|
||||
await Task.Delay(TimeSpan.FromSeconds(10), stoppingToken);
|
||||
|
||||
while (!stoppingToken.IsCancellationRequested)
|
||||
{
|
||||
try
|
||||
{
|
||||
var syncedCount = await _syncService.SyncAllAsync(stoppingToken);
|
||||
|
||||
_logger.LogInformation(
|
||||
"Filament usage sync completed — {SyncedCount} printer(s) synced. Next sync in {Interval}",
|
||||
syncedCount, _options.PollingInterval);
|
||||
}
|
||||
catch (Exception ex) when (ex is not OperationCanceledException)
|
||||
{
|
||||
_logger.LogError(ex,
|
||||
"Error during filament usage sync cycle — will retry in {Interval}",
|
||||
_options.PollingInterval);
|
||||
}
|
||||
|
||||
await Task.Delay(_options.PollingInterval, stoppingToken);
|
||||
}
|
||||
|
||||
_logger.LogInformation("Filament usage sync job shutting down");
|
||||
}
|
||||
}
|
||||
80
backend/API/Jobs/MoonrakerPrinterSyncJob.cs
Normal file
80
backend/API/Jobs/MoonrakerPrinterSyncJob.cs
Normal file
@@ -0,0 +1,80 @@
|
||||
using Extrudex.Domain.Interfaces;
|
||||
using Extrudex.Infrastructure.Configuration;
|
||||
using Microsoft.Extensions.Hosting;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using Microsoft.Extensions.Options;
|
||||
|
||||
namespace Extrudex.API.Jobs;
|
||||
|
||||
/// <summary>
|
||||
/// Background service that periodically syncs Moonraker printer status
|
||||
/// and print job history into the Extrudex database. Runs as a hosted
|
||||
/// service and polls all active Moonraker printers on a configurable
|
||||
/// interval to update printer state and map completed print jobs
|
||||
/// to PrintJob and FilamentUsage entities.
|
||||
///
|
||||
/// Configuration is bound from the "MoonrakerPrinterSync" section in
|
||||
/// appsettings.json. Set Enabled=false to disable without removing
|
||||
/// the service registration.
|
||||
/// </summary>
|
||||
public class MoonrakerPrinterSyncJob : BackgroundService
|
||||
{
|
||||
private readonly IMoonrakerPrinterSyncService _syncService;
|
||||
private readonly MoonrakerPrinterSyncOptions _options;
|
||||
private readonly ILogger<MoonrakerPrinterSyncJob> _logger;
|
||||
|
||||
/// <summary>
|
||||
/// Creates a new MoonrakerPrinterSyncJob.
|
||||
/// </summary>
|
||||
/// <param name="syncService">The service that performs the actual sync logic.</param>
|
||||
/// <param name="options">Configuration options for polling interval and timeouts.</param>
|
||||
/// <param name="logger">Logger for diagnostic output.</param>
|
||||
public MoonrakerPrinterSyncJob(
|
||||
IMoonrakerPrinterSyncService syncService,
|
||||
IOptions<MoonrakerPrinterSyncOptions> options,
|
||||
ILogger<MoonrakerPrinterSyncJob> logger)
|
||||
{
|
||||
_syncService = syncService;
|
||||
_options = options.Value;
|
||||
_logger = logger;
|
||||
}
|
||||
|
||||
/// <inheritdoc />
|
||||
protected override async Task ExecuteAsync(CancellationToken stoppingToken)
|
||||
{
|
||||
if (!_options.Enabled)
|
||||
{
|
||||
_logger.LogInformation("Moonraker printer sync job is disabled via configuration — exiting");
|
||||
return;
|
||||
}
|
||||
|
||||
_logger.LogInformation(
|
||||
"Moonraker printer sync job starting — polling every {Interval}",
|
||||
_options.PollingInterval);
|
||||
|
||||
// Delay briefly on startup to allow the web host to fully initialize
|
||||
await Task.Delay(TimeSpan.FromSeconds(15), stoppingToken);
|
||||
|
||||
while (!stoppingToken.IsCancellationRequested)
|
||||
{
|
||||
try
|
||||
{
|
||||
var syncedCount = await _syncService.SyncAllAsync(stoppingToken);
|
||||
|
||||
_logger.LogInformation(
|
||||
"Moonraker printer sync completed — {SyncedCount} printer(s) synced. Next sync in {Interval}",
|
||||
syncedCount, _options.PollingInterval);
|
||||
}
|
||||
catch (Exception ex) when (ex is not OperationCanceledException)
|
||||
{
|
||||
_logger.LogError(ex,
|
||||
"Error during Moonraker printer sync cycle — will retry in {Interval}",
|
||||
_options.PollingInterval);
|
||||
}
|
||||
|
||||
await Task.Delay(_options.PollingInterval, stoppingToken);
|
||||
}
|
||||
|
||||
_logger.LogInformation("Moonraker printer sync job shutting down");
|
||||
}
|
||||
}
|
||||
126
backend/API/Validators/FilamentValidators.cs
Normal file
126
backend/API/Validators/FilamentValidators.cs
Normal file
@@ -0,0 +1,126 @@
|
||||
using Extrudex.API.DTOs.Filaments;
|
||||
using FluentValidation;
|
||||
|
||||
namespace Extrudex.API.Validators;
|
||||
|
||||
/// <summary>
|
||||
/// Validation rules for creating a Filament (Spool) via the /filaments route.
|
||||
/// Mirrors the domain rules enforced in the controller and ensures consistent
|
||||
/// validation regardless of the request pipeline entry point.
|
||||
/// </summary>
|
||||
public class CreateFilamentRequestValidator : AbstractValidator<CreateFilamentRequest>
|
||||
{
|
||||
/// <summary>
|
||||
/// Initializes validation rules for <see cref="CreateFilamentRequest"/>.
|
||||
/// </summary>
|
||||
public CreateFilamentRequestValidator()
|
||||
{
|
||||
RuleFor(x => x.MaterialBaseId)
|
||||
.NotEmpty().WithMessage("MaterialBaseId is required.");
|
||||
|
||||
RuleFor(x => x.MaterialFinishId)
|
||||
.NotEmpty().WithMessage("MaterialFinishId is required.");
|
||||
|
||||
RuleFor(x => x.Brand)
|
||||
.NotEmpty().WithMessage("Brand is required.")
|
||||
.MaximumLength(200).WithMessage("Brand must not exceed 200 characters.");
|
||||
|
||||
RuleFor(x => x.ColorName)
|
||||
.NotEmpty().WithMessage("ColorName is required.")
|
||||
.MaximumLength(200).WithMessage("ColorName must not exceed 200 characters.");
|
||||
|
||||
RuleFor(x => x.ColorHex)
|
||||
.NotEmpty().WithMessage("ColorHex is required.")
|
||||
.Matches(@"^#[0-9A-Fa-f]{6}$").WithMessage("ColorHex must be a valid hex color code (e.g., #FF0000).");
|
||||
|
||||
RuleFor(x => x.WeightTotalGrams)
|
||||
.GreaterThan(0).WithMessage("Total weight must be greater than zero.");
|
||||
|
||||
RuleFor(x => x.WeightRemainingGrams)
|
||||
.GreaterThanOrEqualTo(0).WithMessage("Remaining weight must be non-negative.");
|
||||
|
||||
RuleFor(x => x.WeightRemainingGrams)
|
||||
.LessThanOrEqualTo(x => x.WeightTotalGrams)
|
||||
.WithMessage("WeightRemainingGrams cannot exceed WeightTotalGrams.");
|
||||
|
||||
RuleFor(x => x.FilamentDiameterMm)
|
||||
.GreaterThan(0).WithMessage("Filament diameter must be greater than zero.");
|
||||
|
||||
RuleFor(x => x.SpoolSerial)
|
||||
.NotEmpty().WithMessage("SpoolSerial is required.")
|
||||
.MaximumLength(200).WithMessage("SpoolSerial must not exceed 200 characters.");
|
||||
|
||||
When(x => x.PurchasePrice.HasValue, () =>
|
||||
{
|
||||
RuleFor(x => x.PurchasePrice!.Value)
|
||||
.GreaterThanOrEqualTo(0).WithMessage("Purchase price must be non-negative.");
|
||||
});
|
||||
|
||||
When(x => x.StorageLocation != null, () =>
|
||||
{
|
||||
RuleFor(x => x.StorageLocation!)
|
||||
.MaximumLength(200).WithMessage("StorageLocation must not exceed 200 characters.");
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Validation rules for updating a Filament (Spool) via the /filaments route.
|
||||
/// Enforces the same domain rules as creation, plus ensures the updated
|
||||
/// WeightRemainingGrams does not exceed the updated WeightTotalGrams.
|
||||
/// </summary>
|
||||
public class UpdateFilamentRequestValidator : AbstractValidator<UpdateFilamentRequest>
|
||||
{
|
||||
/// <summary>
|
||||
/// Initializes validation rules for <see cref="UpdateFilamentRequest"/>.
|
||||
/// </summary>
|
||||
public UpdateFilamentRequestValidator()
|
||||
{
|
||||
RuleFor(x => x.MaterialBaseId)
|
||||
.NotEmpty().WithMessage("MaterialBaseId is required.");
|
||||
|
||||
RuleFor(x => x.MaterialFinishId)
|
||||
.NotEmpty().WithMessage("MaterialFinishId is required.");
|
||||
|
||||
RuleFor(x => x.Brand)
|
||||
.NotEmpty().WithMessage("Brand is required.")
|
||||
.MaximumLength(200).WithMessage("Brand must not exceed 200 characters.");
|
||||
|
||||
RuleFor(x => x.ColorName)
|
||||
.NotEmpty().WithMessage("ColorName is required.")
|
||||
.MaximumLength(200).WithMessage("ColorName must not exceed 200 characters.");
|
||||
|
||||
RuleFor(x => x.ColorHex)
|
||||
.NotEmpty().WithMessage("ColorHex is required.")
|
||||
.Matches(@"^#[0-9A-Fa-f]{6}$").WithMessage("ColorHex must be a valid hex color code (e.g., #FF0000).");
|
||||
|
||||
RuleFor(x => x.WeightTotalGrams)
|
||||
.GreaterThan(0).WithMessage("Total weight must be greater than zero.");
|
||||
|
||||
RuleFor(x => x.WeightRemainingGrams)
|
||||
.GreaterThanOrEqualTo(0).WithMessage("Remaining weight must be non-negative.");
|
||||
|
||||
RuleFor(x => x.WeightRemainingGrams)
|
||||
.LessThanOrEqualTo(x => x.WeightTotalGrams)
|
||||
.WithMessage("WeightRemainingGrams cannot exceed WeightTotalGrams.");
|
||||
|
||||
RuleFor(x => x.FilamentDiameterMm)
|
||||
.GreaterThan(0).WithMessage("Filament diameter must be greater than zero.");
|
||||
|
||||
RuleFor(x => x.SpoolSerial)
|
||||
.NotEmpty().WithMessage("SpoolSerial is required.")
|
||||
.MaximumLength(200).WithMessage("SpoolSerial must not exceed 200 characters.");
|
||||
|
||||
When(x => x.PurchasePrice.HasValue, () =>
|
||||
{
|
||||
RuleFor(x => x.PurchasePrice!.Value)
|
||||
.GreaterThanOrEqualTo(0).WithMessage("Purchase price must be non-negative.");
|
||||
});
|
||||
|
||||
When(x => x.StorageLocation != null, () =>
|
||||
{
|
||||
RuleFor(x => x.StorageLocation!)
|
||||
.MaximumLength(200).WithMessage("StorageLocation must not exceed 200 characters.");
|
||||
});
|
||||
}
|
||||
}
|
||||
25
backend/Dockerfile
Normal file
25
backend/Dockerfile
Normal file
@@ -0,0 +1,25 @@
|
||||
# Build stage
|
||||
FROM golang:1.24-alpine AS builder
|
||||
|
||||
WORKDIR /app
|
||||
|
||||
# Copy go mod files first for caching
|
||||
COPY go.mod go.sum ./
|
||||
RUN go mod download
|
||||
|
||||
# Copy source and build
|
||||
COPY . .
|
||||
RUN CGO_ENABLED=0 GOOS=linux go build -o server ./cmd/server
|
||||
|
||||
# Final stage
|
||||
FROM alpine:latest
|
||||
RUN apk --no-cache add ca-certificates
|
||||
|
||||
WORKDIR /root/
|
||||
|
||||
# Copy binary from builder
|
||||
COPY --from=builder /app/server .
|
||||
|
||||
EXPOSE 8080
|
||||
|
||||
CMD ["./server"]
|
||||
19
backend/Domain/DTOs/Moonraker/MoonrakerDisplayStatus.cs
Normal file
19
backend/Domain/DTOs/Moonraker/MoonrakerDisplayStatus.cs
Normal file
@@ -0,0 +1,19 @@
|
||||
namespace Extrudex.Domain.DTOs.Moonraker;
|
||||
|
||||
/// <summary>
|
||||
/// Response DTO for Moonraker /printer/objects/query?display_status endpoint.
|
||||
/// Contains progress percentage and message for the current print job.
|
||||
/// Used by the SignalR hub to push real-time progress to connected clients.
|
||||
/// </summary>
|
||||
public class MoonrakerDisplayStatus
|
||||
{
|
||||
/// <summary>
|
||||
/// Print progress as a decimal between 0 and 1 (0% to 100%).
|
||||
/// </summary>
|
||||
public decimal Progress { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Status message displayed on the printer LCD (e.g., "Printing...", "Heating...").
|
||||
/// </summary>
|
||||
public string Message { get; set; } = string.Empty;
|
||||
}
|
||||
20
backend/Domain/DTOs/Moonraker/MoonrakerHistoryResponse.cs
Normal file
20
backend/Domain/DTOs/Moonraker/MoonrakerHistoryResponse.cs
Normal file
@@ -0,0 +1,20 @@
|
||||
namespace Extrudex.Domain.DTOs.Moonraker;
|
||||
|
||||
/// <summary>
|
||||
/// Response DTO for the Moonraker /server/history/items endpoint.
|
||||
/// Wraps the paginated list of print job history items.
|
||||
/// </summary>
|
||||
public class MoonrakerHistoryResponse
|
||||
{
|
||||
/// <summary>
|
||||
/// The list of print job history items returned by Moonraker.
|
||||
/// Most recent jobs appear first (descending by start time).
|
||||
/// </summary>
|
||||
public List<MoonrakerPrintJob> Items { get; set; } = [];
|
||||
|
||||
/// <summary>
|
||||
/// Total number of print jobs available on the server
|
||||
/// (for pagination; the Items list may be a subset).
|
||||
/// </summary>
|
||||
public int TotalCount { get; set; }
|
||||
}
|
||||
56
backend/Domain/DTOs/Moonraker/MoonrakerPrintJob.cs
Normal file
56
backend/Domain/DTOs/Moonraker/MoonrakerPrintJob.cs
Normal file
@@ -0,0 +1,56 @@
|
||||
namespace Extrudex.Domain.DTOs.Moonraker;
|
||||
|
||||
/// <summary>
|
||||
/// Response DTO for a single Moonraker print job history item.
|
||||
/// Maps to the objects returned by /server/history/items.
|
||||
/// Contains filament usage, duration, and status for a completed or active print.
|
||||
/// </summary>
|
||||
public class MoonrakerPrintJob
|
||||
{
|
||||
/// <summary>
|
||||
/// Unique Moonraker job identifier (e.g., "000001").
|
||||
/// </summary>
|
||||
public string JobId { get; set; } = string.Empty;
|
||||
|
||||
/// <summary>
|
||||
/// Filename of the G-code file that was printed.
|
||||
/// </summary>
|
||||
public string Filename { get; set; } = string.Empty;
|
||||
|
||||
/// <summary>
|
||||
/// Current status of this print job: "completed", "cancelled", "error", "in_progress".
|
||||
/// </summary>
|
||||
public string Status { get; set; } = string.Empty;
|
||||
|
||||
/// <summary>
|
||||
/// Total filament used in millimeters for this print job.
|
||||
/// This is the primary measurement; grams are derived from this value.
|
||||
/// </summary>
|
||||
public decimal FilamentUsedMm { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Total print duration in seconds.
|
||||
/// </summary>
|
||||
public decimal PrintDurationSeconds { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Total print duration including setup and warmup, in seconds.
|
||||
/// </summary>
|
||||
public decimal TotalDurationSeconds { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Timestamp when the print job started (UTC).
|
||||
/// </summary>
|
||||
public DateTime? StartTime { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Timestamp when the print job ended (UTC). Null if still in progress.
|
||||
/// </summary>
|
||||
public DateTime? EndTime { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Metadata dictionary from Moonraker. May contain filament_type,
|
||||
/// filament_name, nozzle_diameter, and other slicer-provided fields.
|
||||
/// </summary>
|
||||
public Dictionary<string, object> Metadata { get; set; } = new();
|
||||
}
|
||||
36
backend/Domain/DTOs/Moonraker/MoonrakerPrintStats.cs
Normal file
36
backend/Domain/DTOs/Moonraker/MoonrakerPrintStats.cs
Normal file
@@ -0,0 +1,36 @@
|
||||
namespace Extrudex.Domain.DTOs.Moonraker;
|
||||
|
||||
/// <summary>
|
||||
/// Response DTO for Moonraker /printer/objects/query?print_stats endpoint.
|
||||
/// Contains real-time print statistics including current job state,
|
||||
/// filament consumed, and file being printed.
|
||||
/// </summary>
|
||||
public class MoonrakerPrintStats
|
||||
{
|
||||
/// <summary>
|
||||
/// Current print state: "standby", "printing", "paused", "complete", "error", "cancelled".
|
||||
/// </summary>
|
||||
public string State { get; set; } = string.Empty;
|
||||
|
||||
/// <summary>
|
||||
/// Total filament used in millimeters for the current print session.
|
||||
/// </summary>
|
||||
public decimal FilamentUsedMm { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Total print duration in seconds for the current print session.
|
||||
/// </summary>
|
||||
public decimal PrintDurationSeconds { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Filename of the G-code file currently being printed.
|
||||
/// Null if no print is active.
|
||||
/// </summary>
|
||||
public string? Filename { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Detailed message from Klipper about the current print state.
|
||||
/// May contain error details when state is "error".
|
||||
/// </summary>
|
||||
public string? Message { get; set; }
|
||||
}
|
||||
26
backend/Domain/DTOs/Moonraker/MoonrakerPrinterInfo.cs
Normal file
26
backend/Domain/DTOs/Moonraker/MoonrakerPrinterInfo.cs
Normal file
@@ -0,0 +1,26 @@
|
||||
namespace Extrudex.Domain.DTOs.Moonraker;
|
||||
|
||||
/// <summary>
|
||||
/// Response DTO for the Moonraker /printer/info endpoint.
|
||||
/// Contains the current operational state of the Klipper printer.
|
||||
/// Used to determine whether the printer is idle, printing, paused, or in error.
|
||||
/// </summary>
|
||||
public class MoonrakerPrinterInfo
|
||||
{
|
||||
/// <summary>
|
||||
/// Current Klipper state: "ready", "startup", "shutdown", "error", "cancelled".
|
||||
/// A "ready" state means the printer is connected and idle.
|
||||
/// </summary>
|
||||
public string State { get; set; } = string.Empty;
|
||||
|
||||
/// <summary>
|
||||
/// Detailed state message from Klipper. May contain error details
|
||||
/// when the state is "error" or "shutdown".
|
||||
/// </summary>
|
||||
public string StateMessage { get; set; } = string.Empty;
|
||||
|
||||
/// <summary>
|
||||
/// Whether the Klipper firmware is currently connected and responsive.
|
||||
/// </summary>
|
||||
public bool KlippyReady { get; set; }
|
||||
}
|
||||
25
backend/Domain/DTOs/Moonraker/MoonrakerRequest.cs
Normal file
25
backend/Domain/DTOs/Moonraker/MoonrakerRequest.cs
Normal file
@@ -0,0 +1,25 @@
|
||||
namespace Extrudex.Domain.DTOs.Moonraker;
|
||||
|
||||
/// <summary>
|
||||
/// Request DTO for querying the Moonraker API.
|
||||
/// Encapsulates the connection parameters needed to reach a specific
|
||||
/// Moonraker instance on a Klipper-based printer.
|
||||
/// </summary>
|
||||
public class MoonrakerRequest
|
||||
{
|
||||
/// <summary>
|
||||
/// Hostname or IP address of the Moonraker printer.
|
||||
/// </summary>
|
||||
public string HostnameOrIp { get; set; } = string.Empty;
|
||||
|
||||
/// <summary>
|
||||
/// Port number for the Moonraker API. Default: 7125.
|
||||
/// </summary>
|
||||
public int Port { get; set; } = 7125;
|
||||
|
||||
/// <summary>
|
||||
/// Optional API key for authenticating with Moonraker.
|
||||
/// Required when the server has API key authentication enabled.
|
||||
/// </summary>
|
||||
public string? ApiKey { get; set; }
|
||||
}
|
||||
44
backend/Domain/DTOs/Moonraker/MoonrakerServerInfo.cs
Normal file
44
backend/Domain/DTOs/Moonraker/MoonrakerServerInfo.cs
Normal file
@@ -0,0 +1,44 @@
|
||||
namespace Extrudex.Domain.DTOs.Moonraker;
|
||||
|
||||
/// <summary>
|
||||
/// Response DTO for the Moonraker /server/info endpoint.
|
||||
/// Contains server identification and operational state.
|
||||
/// Used to verify connectivity and determine Moonraker version.
|
||||
/// </summary>
|
||||
public class MoonrakerServerInfo
|
||||
{
|
||||
/// <summary>
|
||||
/// The hostname of the Moonraker server (e.g., "mainsail").
|
||||
/// </summary>
|
||||
public string Hostname { get; set; } = string.Empty;
|
||||
|
||||
/// <summary>
|
||||
/// Moonraker software version string (e.g., "0.8.0-89ee464").
|
||||
/// </summary>
|
||||
public string SoftwareVersion { get; set; } = string.Empty;
|
||||
|
||||
/// <summary>
|
||||
/// CPU model string reported by the host system.
|
||||
/// </summary>
|
||||
public string CpuInfo { get; set; } = string.Empty;
|
||||
|
||||
/// <summary>
|
||||
/// Whether Klipper is currently connected to the MCU.
|
||||
/// </summary>
|
||||
public bool KlippyConnected { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// The current Klipper state (e.g., "ready", "startup", "error").
|
||||
/// </summary>
|
||||
public string KlippyState { get; set; } = string.Empty;
|
||||
|
||||
/// <summary>
|
||||
/// Whether the Moonraker API requires an authentication token.
|
||||
/// </summary>
|
||||
public bool ApiKeyRequired { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// List of registered Moonraker plugin names.
|
||||
/// </summary>
|
||||
public List<string> Plugins { get; set; } = [];
|
||||
}
|
||||
73
backend/Domain/Entities/FilamentUsage.cs
Normal file
73
backend/Domain/Entities/FilamentUsage.cs
Normal file
@@ -0,0 +1,73 @@
|
||||
using Extrudex.Domain.Base;
|
||||
|
||||
namespace Extrudex.Domain.Entities;
|
||||
|
||||
/// <summary>
|
||||
/// Tracks filament consumption for a specific print job on a specific spool.
|
||||
/// Each record captures the grams used, which printer consumed it, and when the
|
||||
/// usage was recorded. This enables granular per-job usage analytics, COGS
|
||||
/// reconciliation, and spool weight depletion tracking.
|
||||
///
|
||||
/// A single PrintJob may have multiple FilamentUsage records if multiple spools
|
||||
/// were consumed (e.g., multi-material prints via AMS).
|
||||
/// </summary>
|
||||
public class FilamentUsage : AuditableEntity
|
||||
{
|
||||
/// <summary>
|
||||
/// Foreign key to the print job that consumed this filament.
|
||||
/// A usage record is always tied to a print job.
|
||||
/// </summary>
|
||||
public Guid PrintJobId { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Navigation to the print job that consumed this filament.
|
||||
/// </summary>
|
||||
public PrintJob PrintJob { get; set; } = null!;
|
||||
|
||||
/// <summary>
|
||||
/// Foreign key to the spool (filament) that provided the material.
|
||||
/// Links usage back to the specific physical spool for inventory tracking.
|
||||
/// </summary>
|
||||
public Guid SpoolId { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Navigation to the spool that provided the material.
|
||||
/// </summary>
|
||||
public Spool Spool { get; set; } = null!;
|
||||
|
||||
/// <summary>
|
||||
/// Foreign key to the printer that executed the print job.
|
||||
/// Denormalized from PrintJob for direct querying of per-printer usage.
|
||||
/// </summary>
|
||||
public Guid PrinterId { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Navigation to the printer that executed the print job.
|
||||
/// </summary>
|
||||
public Printer Printer { get; set; } = null!;
|
||||
|
||||
/// <summary>
|
||||
/// Grams of filament consumed during this print job.
|
||||
/// Derived from mm_extruded × cross_section_area × material_density,
|
||||
/// or measured directly from AMS weight delta.
|
||||
/// </summary>
|
||||
public decimal GramsUsed { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Millimeters of filament extruded for this usage record.
|
||||
/// The primary physical measurement; grams_used is derived from this.
|
||||
/// </summary>
|
||||
public decimal MmExtruded { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Timestamp when this usage record was created (UTC).
|
||||
/// Represents when the usage was first logged, which may differ from
|
||||
/// the print job's started_at or completed_at timestamps.
|
||||
/// </summary>
|
||||
public DateTime RecordedAt { get; set; } = DateTime.UtcNow;
|
||||
|
||||
/// <summary>
|
||||
/// Optional notes about this usage record (e.g., "AMS tray 3", "manual weight check").
|
||||
/// </summary>
|
||||
public string? Notes { get; set; }
|
||||
}
|
||||
@@ -97,4 +97,10 @@ public class PrintJob : AuditableEntity
|
||||
/// Optional notes about the print job (e.g., "First layer adhesion issues").
|
||||
/// </summary>
|
||||
public string? Notes { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Navigation collection of filament usage records for this print job.
|
||||
/// Enables tracking granular per-spool consumption within a print.
|
||||
/// </summary>
|
||||
public ICollection<FilamentUsage> FilamentUsages { get; set; } = new List<FilamentUsage>();
|
||||
}
|
||||
@@ -94,4 +94,10 @@ public class Printer : AuditableEntity
|
||||
/// Navigation collection of print jobs executed on this printer.
|
||||
/// </summary>
|
||||
public ICollection<PrintJob> PrintJobs { get; set; } = new List<PrintJob>();
|
||||
|
||||
/// <summary>
|
||||
/// Navigation collection of filament usage records tracking consumption on this printer.
|
||||
/// Enables querying per-printer filament usage and COGS.
|
||||
/// </summary>
|
||||
public ICollection<FilamentUsage> FilamentUsages { get; set; } = new List<FilamentUsage>();
|
||||
}
|
||||
@@ -93,6 +93,20 @@ public class Spool : AuditableEntity
|
||||
/// </summary>
|
||||
public bool IsActive { get; set; } = true;
|
||||
|
||||
/// <summary>
|
||||
/// Whether the spool has been archived (removed from active inventory).
|
||||
/// Archived spools are retained for historical records but hidden from
|
||||
/// default inventory views. Distinguishes long-term archival from
|
||||
/// temporary inactivity (e.g., spool swapped out of AMS).
|
||||
/// </summary>
|
||||
public bool IsArchived { get; set; } = false;
|
||||
|
||||
/// <summary>
|
||||
/// Physical storage location of the spool (e.g., "Shelf A", "Drawer 3", "AMS Tray 2").
|
||||
/// Optional — not every spool has a designated storage location.
|
||||
/// </summary>
|
||||
public string? StorageLocation { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Navigation collection of AMS slots where this spool is loaded.
|
||||
/// </summary>
|
||||
@@ -102,4 +116,10 @@ public class Spool : AuditableEntity
|
||||
/// Navigation collection of print jobs that consumed filament from this spool.
|
||||
/// </summary>
|
||||
public ICollection<PrintJob> PrintJobs { get; set; } = new List<PrintJob>();
|
||||
|
||||
/// <summary>
|
||||
/// Navigation collection of filament usage records tracking consumption from this spool.
|
||||
/// Enables querying how much filament was consumed per print job.
|
||||
/// </summary>
|
||||
public ICollection<FilamentUsage> FilamentUsages { get; set; } = new List<FilamentUsage>();
|
||||
}
|
||||
72
backend/Domain/Entities/UsageLog.cs
Normal file
72
backend/Domain/Entities/UsageLog.cs
Normal file
@@ -0,0 +1,72 @@
|
||||
using Extrudex.Domain.Base;
|
||||
using Extrudex.Domain.Enums;
|
||||
|
||||
namespace Extrudex.Domain.Entities;
|
||||
|
||||
/// <summary>
|
||||
/// Represents a single filament usage log entry. Records how much filament
|
||||
/// was consumed, by which printer, at what time, and optionally linked to
|
||||
/// a print job. This provides a fine-grained audit trail of filament consumption
|
||||
/// independent of print job lifecycle.
|
||||
/// </summary>
|
||||
public class UsageLog : AuditableEntity
|
||||
{
|
||||
/// <summary>
|
||||
/// Foreign key to the spool that provided the filament.
|
||||
/// </summary>
|
||||
public Guid SpoolId { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Navigation to the spool that provided the filament.
|
||||
/// </summary>
|
||||
public Spool Spool { get; set; } = null!;
|
||||
|
||||
/// <summary>
|
||||
/// Foreign key to the printer that consumed the filament.
|
||||
/// Nullable to support manual entries without a specific printer.
|
||||
/// </summary>
|
||||
public Guid? PrinterId { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Navigation to the printer that consumed the filament.
|
||||
/// </summary>
|
||||
public Printer? Printer { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Foreign key to the print job associated with this usage entry.
|
||||
/// Nullable because usage can be logged before or without a print job.
|
||||
/// </summary>
|
||||
public Guid? PrintJobId { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Navigation to the print job associated with this usage entry.
|
||||
/// </summary>
|
||||
public PrintJob? PrintJob { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// The number of grams of filament consumed in this usage event.
|
||||
/// </summary>
|
||||
public decimal GramsUsed { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// The number of millimeters of filament extruded in this usage event.
|
||||
/// Optional — may not be available for all data sources.
|
||||
/// </summary>
|
||||
public decimal? MmExtruded { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Timestamp when the usage occurred (UTC). This is the actual time of
|
||||
/// consumption, which may differ from CreatedAt if the entry was recorded later.
|
||||
/// </summary>
|
||||
public DateTime UsageTimestamp { get; set; } = DateTime.UtcNow;
|
||||
|
||||
/// <summary>
|
||||
/// The source of the usage data (which integration path provided it).
|
||||
/// </summary>
|
||||
public DataSource DataSource { get; set; } = DataSource.Manual;
|
||||
|
||||
/// <summary>
|
||||
/// Optional notes about this usage entry.
|
||||
/// </summary>
|
||||
public string? Notes { get; set; }
|
||||
}
|
||||
76
backend/Domain/Interfaces/ICostPerPrintService.cs
Normal file
76
backend/Domain/Interfaces/ICostPerPrintService.cs
Normal file
@@ -0,0 +1,76 @@
|
||||
namespace Extrudex.Domain.Interfaces;
|
||||
|
||||
/// <summary>
|
||||
/// Service interface for calculating the cost of goods sold (COGS) per print job.
|
||||
/// Uses the spool's purchase price and the print job's derived grams consumed
|
||||
/// to produce a cost breakdown. Handles missing cost data gracefully by returning
|
||||
/// warnings rather than throwing exceptions.
|
||||
/// </summary>
|
||||
public interface ICostPerPrintService
|
||||
{
|
||||
/// <summary>
|
||||
/// Calculates the cost per print for a specific print job.
|
||||
/// </summary>
|
||||
/// <param name="printJobId">The unique identifier of the print job.</param>
|
||||
/// <param name="cancellationToken">Optional cancellation token.</param>
|
||||
/// <returns>
|
||||
/// A <see cref="CostPerPrintResult"/> containing the cost breakdown,
|
||||
/// or warnings if cost data is missing or incomplete.
|
||||
/// </returns>
|
||||
Task<CostPerPrintResult> CalculateAsync(Guid printJobId, CancellationToken cancellationToken = default);
|
||||
|
||||
/// <summary>
|
||||
/// Calculates cost breakdowns for all print jobs associated with a specific spool.
|
||||
/// Useful for spool-level COGS reporting.
|
||||
/// </summary>
|
||||
/// <param name="spoolId">The unique identifier of the spool.</param>
|
||||
/// <param name="cancellationToken">Optional cancellation token.</param>
|
||||
/// <returns>
|
||||
/// A list of <see cref="CostPerPrintResult"/> for each print job on the spool.
|
||||
/// Jobs with missing cost data will include warnings.
|
||||
/// </returns>
|
||||
Task<IReadOnlyList<CostPerPrintResult>> CalculateBySpoolAsync(Guid spoolId, CancellationToken cancellationToken = default);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Result of a cost-per-print calculation. Contains the cost breakdown
|
||||
/// and any warnings about missing or incomplete cost data.
|
||||
/// </summary>
|
||||
public class CostPerPrintResult
|
||||
{
|
||||
/// <summary>The print job identifier this result belongs to.</summary>
|
||||
public Guid PrintJobId { get; set; }
|
||||
|
||||
/// <summary>Human-readable name of the print job.</summary>
|
||||
public string PrintName { get; set; } = string.Empty;
|
||||
|
||||
/// <summary>The spool identifier that provided filament.</summary>
|
||||
public Guid SpoolId { get; set; }
|
||||
|
||||
/// <summary>Serial number of the spool.</summary>
|
||||
public string SpoolSerial { get; set; } = string.Empty;
|
||||
|
||||
/// <summary>Total millimeters of filament extruded.</summary>
|
||||
public decimal MmExtruded { get; set; }
|
||||
|
||||
/// <summary>Derived grams consumed for this print.</summary>
|
||||
public decimal GramsDerived { get; set; }
|
||||
|
||||
/// <summary>The spool's purchase price. Null if not recorded.</summary>
|
||||
public decimal? PurchasePrice { get; set; }
|
||||
|
||||
/// <summary>The spool's total weight in grams when full.</summary>
|
||||
public decimal? WeightTotalGrams { get; set; }
|
||||
|
||||
/// <summary>Cost per gram of filament. Null if purchase price or total weight is missing.</summary>
|
||||
public decimal? CostPerGram { get; set; }
|
||||
|
||||
/// <summary>Calculated cost of this print job. Null if cost data is incomplete.</summary>
|
||||
public decimal? CostPerPrint { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Warnings about missing or incomplete data that prevented a full calculation.
|
||||
/// Empty when all data is available and the calculation succeeded.
|
||||
/// </summary>
|
||||
public List<string> Warnings { get; set; } = new();
|
||||
}
|
||||
50
backend/Domain/Interfaces/IFilamentUsageService.cs
Normal file
50
backend/Domain/Interfaces/IFilamentUsageService.cs
Normal file
@@ -0,0 +1,50 @@
|
||||
using Extrudex.Domain.Entities;
|
||||
|
||||
namespace Extrudex.Domain.Interfaces;
|
||||
|
||||
/// <summary>
|
||||
/// Service for persisting and querying filament usage records.
|
||||
/// Tracks consumption per print job and per spool for COGS and inventory tracking.
|
||||
/// </summary>
|
||||
public interface IFilamentUsageService
|
||||
{
|
||||
/// <summary>
|
||||
/// Records a new filament usage entry for a print job.
|
||||
/// </summary>
|
||||
/// <param name="printJobId">The print job that consumed the filament.</param>
|
||||
/// <param name="spoolId">The spool that provided the filament.</param>
|
||||
/// <param name="printerId">The printer that executed the print.</param>
|
||||
/// <param name="gramsUsed">Grams of filament consumed.</param>
|
||||
/// <param name="mmExtruded">Millimeters of filament extruded.</param>
|
||||
/// <param name="notes">Optional notes about this usage record.</param>
|
||||
/// <param name="cancellationToken">Cancellation token.</param>
|
||||
/// <returns>The created FilamentUsage entity.</returns>
|
||||
Task<FilamentUsage> RecordUsageAsync(
|
||||
Guid printJobId,
|
||||
Guid spoolId,
|
||||
Guid printerId,
|
||||
decimal gramsUsed,
|
||||
decimal mmExtruded,
|
||||
string? notes = null,
|
||||
CancellationToken cancellationToken = default);
|
||||
|
||||
/// <summary>
|
||||
/// Retrieves all filament usage records for a specific print job.
|
||||
/// </summary>
|
||||
/// <param name="printJobId">The print job ID.</param>
|
||||
/// <param name="cancellationToken">Cancellation token.</param>
|
||||
/// <returns>Collection of filament usage records for the print job.</returns>
|
||||
Task<IReadOnlyList<FilamentUsage>> GetByPrintJobAsync(
|
||||
Guid printJobId,
|
||||
CancellationToken cancellationToken = default);
|
||||
|
||||
/// <summary>
|
||||
/// Retrieves all filament usage records for a specific spool.
|
||||
/// </summary>
|
||||
/// <param name="spoolId">The spool ID.</param>
|
||||
/// <param name="cancellationToken">Cancellation token.</param>
|
||||
/// <returns>Collection of filament usage records for the spool.</returns>
|
||||
Task<IReadOnlyList<FilamentUsage>> GetBySpoolAsync(
|
||||
Guid spoolId,
|
||||
CancellationToken cancellationToken = default);
|
||||
}
|
||||
19
backend/Domain/Interfaces/IFilamentUsageSyncService.cs
Normal file
19
backend/Domain/Interfaces/IFilamentUsageSyncService.cs
Normal file
@@ -0,0 +1,19 @@
|
||||
namespace Extrudex.Domain.Interfaces;
|
||||
|
||||
/// <summary>
|
||||
/// Service interface for syncing filament usage data from printers
|
||||
/// into the Extrudex database. Handles querying Moonraker printers,
|
||||
/// computing derived usage metrics, and persisting updates to spools
|
||||
/// and print job records.
|
||||
/// </summary>
|
||||
public interface IFilamentUsageSyncService
|
||||
{
|
||||
/// <summary>
|
||||
/// Performs a single sync cycle: queries all active Moonraker printers,
|
||||
/// fetches their current filament usage data, and persists updates to
|
||||
/// the database.
|
||||
/// </summary>
|
||||
/// <param name="cancellationToken">Cancellation token for graceful shutdown.</param>
|
||||
/// <returns>The number of printers successfully synced.</returns>
|
||||
Task<int> SyncAllAsync(CancellationToken cancellationToken = default);
|
||||
}
|
||||
39
backend/Domain/Interfaces/ILowStockDetector.cs
Normal file
39
backend/Domain/Interfaces/ILowStockDetector.cs
Normal file
@@ -0,0 +1,39 @@
|
||||
namespace Extrudex.Domain.Interfaces;
|
||||
|
||||
/// <summary>
|
||||
/// Detects low-stock filament spools based on configurable weight thresholds.
|
||||
/// Determines whether a spool's remaining filament falls below a critical level
|
||||
/// so that alerts and API flags can be surfaced to the user.
|
||||
/// </summary>
|
||||
public interface ILowStockDetector
|
||||
{
|
||||
/// <summary>
|
||||
/// Determines whether a spool is considered low stock based on its remaining
|
||||
/// weight relative to its total weight and the configured threshold percentage.
|
||||
/// </summary>
|
||||
/// <param name="weightRemainingGrams">The current remaining weight in grams.</param>
|
||||
/// <param name="weightTotalGrams">The total spool weight in grams when full.</param>
|
||||
/// <returns>
|
||||
/// <c>true</c> if the remaining weight percentage is at or below the configured
|
||||
/// low-stock threshold; <c>false</c> otherwise. Returns <c>false</c> for spools
|
||||
/// with zero total weight to avoid division-by-zero.
|
||||
/// </returns>
|
||||
bool IsLowStock(decimal weightRemainingGrams, decimal weightTotalGrams);
|
||||
|
||||
/// <summary>
|
||||
/// Calculates the remaining weight as a percentage of total weight.
|
||||
/// </summary>
|
||||
/// <param name="weightRemainingGrams">The current remaining weight in grams.</param>
|
||||
/// <param name="weightTotalGrams">The total spool weight in grams when full.</param>
|
||||
/// <returns>
|
||||
/// A value between 0 and 100 representing the percentage of filament remaining.
|
||||
/// Returns 0 if total weight is zero to avoid division-by-zero.
|
||||
/// </returns>
|
||||
decimal GetRemainingWeightPercent(decimal weightRemainingGrams, decimal weightTotalGrams);
|
||||
|
||||
/// <summary>
|
||||
/// Gets the currently configured low-stock threshold percentage.
|
||||
/// Useful for API responses so clients know what threshold is in effect.
|
||||
/// </summary>
|
||||
decimal LowStockThresholdPercent { get; }
|
||||
}
|
||||
131
backend/Domain/Interfaces/IMoonrakerClient.cs
Normal file
131
backend/Domain/Interfaces/IMoonrakerClient.cs
Normal file
@@ -0,0 +1,131 @@
|
||||
using Extrudex.Domain.DTOs.Moonraker;
|
||||
|
||||
namespace Extrudex.Domain.Interfaces;
|
||||
|
||||
/// <summary>
|
||||
/// Client interface for communicating with Moonraker REST API endpoints
|
||||
/// on Klipper-based printers (e.g., Elegoo Centauri Carbon).
|
||||
/// Provides strongly-typed methods for server discovery, printer status,
|
||||
/// print job history, and real-time telemetry.
|
||||
/// </summary>
|
||||
public interface IMoonrakerClient
|
||||
{
|
||||
/// <summary>
|
||||
/// Checks whether the Moonraker server is reachable and responding.
|
||||
/// Calls the /server/info endpoint and returns the server information
|
||||
/// if successful, or null if the server is unreachable.
|
||||
/// </summary>
|
||||
/// <param name="hostnameOrIp">The printer's hostname or IP address.</param>
|
||||
/// <param name="port">The Moonraker API port (default: 7125).</param>
|
||||
/// <param name="apiKey">Optional API key for authentication.</param>
|
||||
/// <param name="cancellationToken">Cancellation token for the HTTP request.</param>
|
||||
/// <returns>Server info if reachable; <c>null</c> if unreachable.</returns>
|
||||
Task<MoonrakerServerInfo?> GetServerInfoAsync(
|
||||
string hostnameOrIp,
|
||||
int port,
|
||||
string? apiKey,
|
||||
CancellationToken cancellationToken = default);
|
||||
|
||||
/// <summary>
|
||||
/// Checks whether the Moonraker server is reachable and responding.
|
||||
/// This is a convenience method equivalent to calling GetServerInfoAsync
|
||||
/// and checking for a non-null result.
|
||||
/// </summary>
|
||||
/// <param name="hostnameOrIp">The printer's hostname or IP address.</param>
|
||||
/// <param name="port">The Moonraker API port (default: 7125).</param>
|
||||
/// <param name="apiKey">Optional API key for authentication.</param>
|
||||
/// <param name="cancellationToken">Cancellation token for the HTTP request.</param>
|
||||
/// <returns><c>true</c> if the server responded successfully; otherwise <c>false</c>.</returns>
|
||||
Task<bool> IsReachableAsync(
|
||||
string hostnameOrIp,
|
||||
int port,
|
||||
string? apiKey,
|
||||
CancellationToken cancellationToken = default);
|
||||
|
||||
/// <summary>
|
||||
/// Fetches the current printer info from the /printer/info endpoint.
|
||||
/// Returns the Klipper state and readiness status.
|
||||
/// </summary>
|
||||
/// <param name="hostnameOrIp">The printer's hostname or IP address.</param>
|
||||
/// <param name="port">The Moonraker API port (default: 7125).</param>
|
||||
/// <param name="apiKey">Optional API key for authentication.</param>
|
||||
/// <param name="cancellationToken">Cancellation token for the HTTP request.</param>
|
||||
/// <returns>Printer info if successful; <c>null</c> if the request failed.</returns>
|
||||
Task<MoonrakerPrinterInfo?> GetPrinterInfoAsync(
|
||||
string hostnameOrIp,
|
||||
int port,
|
||||
string? apiKey,
|
||||
CancellationToken cancellationToken = default);
|
||||
|
||||
/// <summary>
|
||||
/// Fetches print job history from the /server/history/items endpoint.
|
||||
/// Returns the most recent print jobs with filament usage data,
|
||||
/// print duration, and completion status.
|
||||
/// </summary>
|
||||
/// <param name="hostnameOrIp">The printer's hostname or IP address.</param>
|
||||
/// <param name="port">The Moonraker API port (default: 7125).</param>
|
||||
/// <param name="apiKey">Optional API key for authentication.</param>
|
||||
/// <param name="limit">Maximum number of history items to return. Default: 50.</param>
|
||||
/// <param name="cancellationToken">Cancellation token for the HTTP request.</param>
|
||||
/// <returns>History response with print jobs; empty list if request failed.</returns>
|
||||
Task<MoonrakerHistoryResponse> GetPrintHistoryAsync(
|
||||
string hostnameOrIp,
|
||||
int port,
|
||||
string? apiKey,
|
||||
int limit = 50,
|
||||
CancellationToken cancellationToken = default);
|
||||
|
||||
/// <summary>
|
||||
/// Fetches the current print statistics from the /printer/objects/query endpoint.
|
||||
/// Returns real-time data including filament used, print duration,
|
||||
/// and current print state for the active or most recent print.
|
||||
/// </summary>
|
||||
/// <param name="hostnameOrIp">The printer's hostname or IP address.</param>
|
||||
/// <param name="port">The Moonraker API port (default: 7125).</param>
|
||||
/// <param name="apiKey">Optional API key for authentication.</param>
|
||||
/// <param name="cancellationToken">Cancellation token for the HTTP request.</param>
|
||||
/// <returns>Print stats if successful; <c>null</c> if the request failed.</returns>
|
||||
Task<MoonrakerPrintStats?> GetPrintStatsAsync(
|
||||
string hostnameOrIp,
|
||||
int port,
|
||||
string? apiKey,
|
||||
CancellationToken cancellationToken = default);
|
||||
|
||||
/// <summary>
|
||||
/// Fetches the current display status from the /printer/objects/query endpoint.
|
||||
/// Returns progress percentage and status message for the active print.
|
||||
/// Used by SignalR to push real-time progress updates to connected clients.
|
||||
/// </summary>
|
||||
/// <param name="hostnameOrIp">The printer's hostname or IP address.</param>
|
||||
/// <param name="port">The Moonraker API port (default: 7125).</param>
|
||||
/// <param name="apiKey">Optional API key for authentication.</param>
|
||||
/// <param name="cancellationToken">Cancellation token for the HTTP request.</param>
|
||||
/// <returns>Display status if successful; <c>null</c> if the request failed.</returns>
|
||||
Task<MoonrakerDisplayStatus?> GetDisplayStatusAsync(
|
||||
string hostnameOrIp,
|
||||
int port,
|
||||
string? apiKey,
|
||||
CancellationToken cancellationToken = default);
|
||||
|
||||
/// <summary>
|
||||
/// Fetches the current filament usage data from the Moonraker server.
|
||||
/// Returns a dictionary of usage metrics reported by the printer.
|
||||
///
|
||||
/// <para>
|
||||
/// <b>Prefer GetPrintHistoryAsync or GetPrintStatsAsync for new code.</b>
|
||||
/// This method is retained for backward compatibility with the
|
||||
/// FilamentUsageSyncService and returns a dictionary of metric names
|
||||
/// to their decimal values for callers that don't need typed DTOs.
|
||||
/// </para>
|
||||
/// </summary>
|
||||
/// <param name="hostnameOrIp">The printer's hostname or IP address.</param>
|
||||
/// <param name="port">The Moonraker API port (default: 7125).</param>
|
||||
/// <param name="apiKey">Optional API key for authentication.</param>
|
||||
/// <param name="cancellationToken">Cancellation token for the HTTP request.</param>
|
||||
/// <returns>A dictionary of usage metric names to their decimal values.</returns>
|
||||
Task<Dictionary<string, decimal>> GetFilamentUsageAsync(
|
||||
string hostnameOrIp,
|
||||
int port,
|
||||
string? apiKey,
|
||||
CancellationToken cancellationToken = default);
|
||||
}
|
||||
20
backend/Domain/Interfaces/IMoonrakerPrinterSyncService.cs
Normal file
20
backend/Domain/Interfaces/IMoonrakerPrinterSyncService.cs
Normal file
@@ -0,0 +1,20 @@
|
||||
using Extrudex.Domain.DTOs.Moonraker;
|
||||
|
||||
namespace Extrudex.Domain.Interfaces;
|
||||
|
||||
/// <summary>
|
||||
/// Service interface for syncing Moonraker printer data into the Extrudex database.
|
||||
/// Handles periodic polling of printer status and mapping print job history
|
||||
/// to PrintJob and FilamentUsage entities.
|
||||
/// </summary>
|
||||
public interface IMoonrakerPrinterSyncService
|
||||
{
|
||||
/// <summary>
|
||||
/// Performs a single sync cycle: queries all active Moonraker printers,
|
||||
/// fetches their current status and print job history, and persists
|
||||
/// updates to the database.
|
||||
/// </summary>
|
||||
/// <param name="cancellationToken">Cancellation token for graceful shutdown.</param>
|
||||
/// <returns>The number of printers successfully synced.</returns>
|
||||
Task<int> SyncAllAsync(CancellationToken cancellationToken = default);
|
||||
}
|
||||
57
backend/Domain/Interfaces/IUsageLogService.cs
Normal file
57
backend/Domain/Interfaces/IUsageLogService.cs
Normal file
@@ -0,0 +1,57 @@
|
||||
using Extrudex.Domain.Entities;
|
||||
using Extrudex.Domain.Enums;
|
||||
|
||||
namespace Extrudex.Domain.Interfaces;
|
||||
|
||||
/// <summary>
|
||||
/// Service for recording filament usage entries. Writes to the usage_logs table
|
||||
/// and provides query capabilities for usage history.
|
||||
/// </summary>
|
||||
public interface IUsageLogService
|
||||
{
|
||||
/// <summary>
|
||||
/// Records a filament usage entry.
|
||||
/// </summary>
|
||||
/// <param name="spoolId">The spool that provided the filament.</param>
|
||||
/// <param name="gramsUsed">Grams of filament consumed.</param>
|
||||
/// <param name="dataSource">Where the data came from.</param>
|
||||
/// <param name="printerId">Optional printer ID.</param>
|
||||
/// <param name="printJobId">Optional print job ID.</param>
|
||||
/// <param name="mmExtruded">Optional mm extruded.</param>
|
||||
/// <param name="usageTimestamp">When the usage occurred (defaults to UTC now).</param>
|
||||
/// <param name="notes">Optional notes.</param>
|
||||
/// <returns>The created UsageLog entity.</returns>
|
||||
Task<UsageLog> RecordUsageAsync(
|
||||
Guid spoolId,
|
||||
decimal gramsUsed,
|
||||
DataSource dataSource,
|
||||
Guid? printerId = null,
|
||||
Guid? printJobId = null,
|
||||
decimal? mmExtruded = null,
|
||||
DateTime? usageTimestamp = null,
|
||||
string? notes = null);
|
||||
|
||||
/// <summary>
|
||||
/// Retrieves usage logs for a specific spool, ordered by usage timestamp descending.
|
||||
/// </summary>
|
||||
/// <param name="spoolId">The spool ID to filter by.</param>
|
||||
/// <param name="cancellationToken">Cancellation token.</param>
|
||||
/// <returns>A collection of usage logs for the spool.</returns>
|
||||
Task<IEnumerable<UsageLog>> GetBySpoolAsync(Guid spoolId, CancellationToken cancellationToken = default);
|
||||
|
||||
/// <summary>
|
||||
/// Retrieves usage logs for a specific printer, ordered by usage timestamp descending.
|
||||
/// </summary>
|
||||
/// <param name="printerId">The printer ID to filter by.</param>
|
||||
/// <param name="cancellationToken">Cancellation token.</param>
|
||||
/// <returns>A collection of usage logs for the printer.</returns>
|
||||
Task<IEnumerable<UsageLog>> GetByPrinterAsync(Guid printerId, CancellationToken cancellationToken = default);
|
||||
|
||||
/// <summary>
|
||||
/// Retrieves usage logs for a specific print job, ordered by usage timestamp descending.
|
||||
/// </summary>
|
||||
/// <param name="printJobId">The print job ID to filter by.</param>
|
||||
/// <param name="cancellationToken">Cancellation token.</param>
|
||||
/// <returns>A collection of usage logs for the print job.</returns>
|
||||
Task<IEnumerable<UsageLog>> GetByPrintJobAsync(Guid printJobId, CancellationToken cancellationToken = default);
|
||||
}
|
||||
@@ -9,6 +9,7 @@
|
||||
</PropertyGroup>
|
||||
|
||||
<ItemGroup>
|
||||
<PackageReference Include="AspNetCore.HealthChecks.NpgSql" Version="9.0.0" />
|
||||
<PackageReference Include="FluentValidation.DependencyInjectionExtensions" Version="12.1.1" />
|
||||
<PackageReference Include="Microsoft.EntityFrameworkCore" Version="9.0.3" />
|
||||
<PackageReference Include="Microsoft.EntityFrameworkCore.Design" Version="9.0.3" />
|
||||
|
||||
@@ -0,0 +1,33 @@
|
||||
namespace Extrudex.Infrastructure.Configuration;
|
||||
|
||||
/// <summary>
|
||||
/// Configuration options for the FilamentUsageSync background job.
|
||||
/// Bound from appsettings.json under the "FilamentUsageSync" section.
|
||||
/// Controls polling interval and per-printer timeout settings.
|
||||
/// </summary>
|
||||
public class FilamentUsageSyncOptions
|
||||
{
|
||||
/// <summary>
|
||||
/// The section name in appsettings.json where these options are bound.
|
||||
/// </summary>
|
||||
public const string SectionName = "FilamentUsageSync";
|
||||
|
||||
/// <summary>
|
||||
/// How often the background job polls printers for usage data.
|
||||
/// Default: 5 minutes. Minimum recommended: 1 minute.
|
||||
/// </summary>
|
||||
public TimeSpan PollingInterval { get; set; } = TimeSpan.FromMinutes(5);
|
||||
|
||||
/// <summary>
|
||||
/// Timeout for individual HTTP requests to a Moonraker printer.
|
||||
/// Default: 30 seconds.
|
||||
/// </summary>
|
||||
public TimeSpan RequestTimeout { get; set; } = TimeSpan.FromSeconds(30);
|
||||
|
||||
/// <summary>
|
||||
/// Whether the sync job is enabled. Set to false to disable
|
||||
/// the background job without removing its registration.
|
||||
/// Default: true.
|
||||
/// </summary>
|
||||
public bool Enabled { get; set; } = true;
|
||||
}
|
||||
@@ -0,0 +1,41 @@
|
||||
namespace Extrudex.Infrastructure.Configuration;
|
||||
|
||||
/// <summary>
|
||||
/// Configuration options for the MoonrakerPrinterSync background service.
|
||||
/// Bound from appsettings.json under the "MoonrakerPrinterSync" section.
|
||||
/// Controls polling interval, timeouts, and feature toggles for the
|
||||
/// printer status and print job mapping service.
|
||||
/// </summary>
|
||||
public class MoonrakerPrinterSyncOptions
|
||||
{
|
||||
/// <summary>
|
||||
/// The section name in appsettings.json where these options are bound.
|
||||
/// </summary>
|
||||
public const string SectionName = "MoonrakerPrinterSync";
|
||||
|
||||
/// <summary>
|
||||
/// How often the background service polls Moonraker printers for status
|
||||
/// and print job data. Default: 1 minute.
|
||||
/// </summary>
|
||||
public TimeSpan PollingInterval { get; set; } = TimeSpan.FromMinutes(1);
|
||||
|
||||
/// <summary>
|
||||
/// Timeout for individual HTTP requests to a Moonraker printer.
|
||||
/// Default: 15 seconds.
|
||||
/// </summary>
|
||||
public TimeSpan RequestTimeout { get; set; } = TimeSpan.FromSeconds(15);
|
||||
|
||||
/// <summary>
|
||||
/// Whether the Moonraker printer sync service is enabled.
|
||||
/// Set to false to disable without removing the service registration.
|
||||
/// Default: true.
|
||||
/// </summary>
|
||||
public bool Enabled { get; set; } = true;
|
||||
|
||||
/// <summary>
|
||||
/// Maximum number of print history items to fetch per printer per sync cycle.
|
||||
/// Controls the batch size when syncing print jobs from Moonraker.
|
||||
/// Default: 25.
|
||||
/// </summary>
|
||||
public int HistoryBatchSize { get; set; } = 25;
|
||||
}
|
||||
@@ -0,0 +1,83 @@
|
||||
using Extrudex.Domain.Entities;
|
||||
using Microsoft.EntityFrameworkCore;
|
||||
using Microsoft.EntityFrameworkCore.Metadata.Builders;
|
||||
|
||||
namespace Extrudex.Infrastructure.Data.Configurations;
|
||||
|
||||
public class FilamentUsageConfiguration : BaseEntityConfiguration<FilamentUsage>
|
||||
{
|
||||
public override void Configure(EntityTypeBuilder<FilamentUsage> builder)
|
||||
{
|
||||
base.Configure(builder);
|
||||
|
||||
builder.Property(e => e.PrintJobId)
|
||||
.HasColumnName("print_job_id")
|
||||
.IsRequired();
|
||||
|
||||
builder.Property(e => e.SpoolId)
|
||||
.HasColumnName("spool_id")
|
||||
.IsRequired();
|
||||
|
||||
builder.Property(e => e.PrinterId)
|
||||
.HasColumnName("printer_id")
|
||||
.IsRequired();
|
||||
|
||||
builder.Property(e => e.GramsUsed)
|
||||
.HasColumnName("grams_used")
|
||||
.HasPrecision(10, 2)
|
||||
.IsRequired();
|
||||
|
||||
builder.Property(e => e.MmExtruded)
|
||||
.HasColumnName("mm_extruded")
|
||||
.HasPrecision(12, 2)
|
||||
.IsRequired();
|
||||
|
||||
builder.Property(e => e.RecordedAt)
|
||||
.HasColumnName("recorded_at")
|
||||
.HasDefaultValueSql("now() at time zone 'utc'")
|
||||
.IsRequired();
|
||||
|
||||
builder.Property(e => e.Notes)
|
||||
.HasColumnName("notes")
|
||||
.HasMaxLength(2000);
|
||||
|
||||
// Index on print_job_id for querying usage by print job
|
||||
builder.HasIndex(e => e.PrintJobId)
|
||||
.HasDatabaseName("ix_filament_usages_print_job_id");
|
||||
|
||||
// Index on spool_id for querying usage by spool (filament)
|
||||
builder.HasIndex(e => e.SpoolId)
|
||||
.HasDatabaseName("ix_filament_usages_spool_id");
|
||||
|
||||
// Index on printer_id for querying usage by printer
|
||||
builder.HasIndex(e => e.PrinterId)
|
||||
.HasDatabaseName("ix_filament_usages_printer_id");
|
||||
|
||||
// Index on recorded_at for time-range queries
|
||||
builder.HasIndex(e => e.RecordedAt)
|
||||
.HasDatabaseName("ix_filament_usages_recorded_at");
|
||||
|
||||
// Composite index for querying usage by spool within a date range
|
||||
builder.HasIndex(e => new { e.SpoolId, e.RecordedAt })
|
||||
.HasDatabaseName("ix_filament_usages_spool_id_recorded_at");
|
||||
|
||||
// Relationships
|
||||
builder.HasOne(e => e.PrintJob)
|
||||
.WithMany(e => e.FilamentUsages)
|
||||
.HasForeignKey(e => e.PrintJobId)
|
||||
.HasConstraintName("fk_filament_usages_print_job")
|
||||
.OnDelete(DeleteBehavior.Cascade);
|
||||
|
||||
builder.HasOne(e => e.Spool)
|
||||
.WithMany(e => e.FilamentUsages)
|
||||
.HasForeignKey(e => e.SpoolId)
|
||||
.HasConstraintName("fk_filament_usages_spool")
|
||||
.OnDelete(DeleteBehavior.Restrict);
|
||||
|
||||
builder.HasOne(e => e.Printer)
|
||||
.WithMany(e => e.FilamentUsages)
|
||||
.HasForeignKey(e => e.PrinterId)
|
||||
.HasConstraintName("fk_filament_usages_printer")
|
||||
.OnDelete(DeleteBehavior.Restrict);
|
||||
}
|
||||
}
|
||||
@@ -68,6 +68,15 @@ public class SpoolConfiguration : BaseEntityConfiguration<Spool>
|
||||
.HasDefaultValue(true)
|
||||
.IsRequired();
|
||||
|
||||
builder.Property(e => e.IsArchived)
|
||||
.HasColumnName("is_archived")
|
||||
.HasDefaultValue(false)
|
||||
.IsRequired();
|
||||
|
||||
builder.Property(e => e.StorageLocation)
|
||||
.HasColumnName("storage_location")
|
||||
.HasMaxLength(200);
|
||||
|
||||
// Unique index on spool_serial — critical for barcode/QR scanning
|
||||
builder.HasIndex(e => e.SpoolSerial)
|
||||
.IsUnique()
|
||||
@@ -89,6 +98,14 @@ public class SpoolConfiguration : BaseEntityConfiguration<Spool>
|
||||
builder.HasIndex(e => e.IsActive)
|
||||
.HasDatabaseName("ix_spools_is_active");
|
||||
|
||||
// Index on is_archived for inventory filtering (exclude archived from default views)
|
||||
builder.HasIndex(e => e.IsArchived)
|
||||
.HasDatabaseName("ix_spools_is_archived");
|
||||
|
||||
// Composite index on is_active + is_archived for common inventory queries
|
||||
builder.HasIndex(e => new { e.IsActive, e.IsArchived })
|
||||
.HasDatabaseName("ix_spools_active_archived");
|
||||
|
||||
// Relationships
|
||||
builder.HasOne(e => e.MaterialBase)
|
||||
.WithMany(e => e.Spools)
|
||||
|
||||
@@ -0,0 +1,91 @@
|
||||
using Extrudex.Domain.Entities;
|
||||
using Extrudex.Domain.Enums;
|
||||
using Microsoft.EntityFrameworkCore;
|
||||
using Microsoft.EntityFrameworkCore.Metadata.Builders;
|
||||
|
||||
namespace Extrudex.Infrastructure.Data.Configurations;
|
||||
|
||||
/// <summary>
|
||||
/// EF Core configuration for the UsageLog entity.
|
||||
/// Maps to the usage_logs table with snake_case columns and appropriate indexes.
|
||||
/// </summary>
|
||||
public class UsageLogConfiguration : BaseEntityConfiguration<UsageLog>
|
||||
{
|
||||
/// <inheritdoc/>
|
||||
public override void Configure(EntityTypeBuilder<UsageLog> builder)
|
||||
{
|
||||
base.Configure(builder);
|
||||
|
||||
builder.Property(e => e.SpoolId)
|
||||
.HasColumnName("spool_id")
|
||||
.IsRequired();
|
||||
|
||||
builder.Property(e => e.PrinterId)
|
||||
.HasColumnName("printer_id");
|
||||
|
||||
builder.Property(e => e.PrintJobId)
|
||||
.HasColumnName("print_job_id");
|
||||
|
||||
builder.Property(e => e.GramsUsed)
|
||||
.HasColumnName("grams_used")
|
||||
.HasPrecision(10, 2)
|
||||
.IsRequired();
|
||||
|
||||
builder.Property(e => e.MmExtruded)
|
||||
.HasColumnName("mm_extruded")
|
||||
.HasPrecision(12, 2);
|
||||
|
||||
builder.Property(e => e.UsageTimestamp)
|
||||
.HasColumnName("usage_timestamp")
|
||||
.IsRequired();
|
||||
|
||||
builder.Property(e => e.DataSource)
|
||||
.HasColumnName("data_source")
|
||||
.HasConversion<string>()
|
||||
.HasMaxLength(50)
|
||||
.IsRequired();
|
||||
|
||||
builder.Property(e => e.Notes)
|
||||
.HasColumnName("notes")
|
||||
.HasMaxLength(2000);
|
||||
|
||||
// Index on spool_id for querying usage by spool
|
||||
builder.HasIndex(e => e.SpoolId)
|
||||
.HasDatabaseName("ix_usage_logs_spool_id");
|
||||
|
||||
// Index on printer_id for querying usage by printer
|
||||
builder.HasIndex(e => e.PrinterId)
|
||||
.HasDatabaseName("ix_usage_logs_printer_id");
|
||||
|
||||
// Index on print_job_id for querying usage by print job
|
||||
builder.HasIndex(e => e.PrintJobId)
|
||||
.HasDatabaseName("ix_usage_logs_print_job_id");
|
||||
|
||||
// Index on usage_timestamp for chronological queries
|
||||
builder.HasIndex(e => e.UsageTimestamp)
|
||||
.HasDatabaseName("ix_usage_logs_usage_timestamp");
|
||||
|
||||
// Index on data_source for filtering by integration path
|
||||
builder.HasIndex(e => e.DataSource)
|
||||
.HasDatabaseName("ix_usage_logs_data_source");
|
||||
|
||||
// Relationships
|
||||
builder.HasOne(e => e.Spool)
|
||||
.WithMany()
|
||||
.HasForeignKey(e => e.SpoolId)
|
||||
.HasConstraintName("fk_usage_logs_spool")
|
||||
.OnDelete(DeleteBehavior.Restrict);
|
||||
|
||||
builder.HasOne(e => e.Printer)
|
||||
.WithMany()
|
||||
.HasForeignKey(e => e.PrinterId)
|
||||
.HasConstraintName("fk_usage_logs_printer")
|
||||
.OnDelete(DeleteBehavior.SetNull);
|
||||
|
||||
builder.HasOne(e => e.PrintJob)
|
||||
.WithMany()
|
||||
.HasForeignKey(e => e.PrintJobId)
|
||||
.HasConstraintName("fk_usage_logs_print_job")
|
||||
.OnDelete(DeleteBehavior.SetNull);
|
||||
}
|
||||
}
|
||||
@@ -23,6 +23,8 @@ public class ExtrudexDbContext : DbContext
|
||||
public DbSet<AmsUnit> AmsUnits => Set<AmsUnit>();
|
||||
public DbSet<AmsSlot> AmsSlots => Set<AmsSlot>();
|
||||
public DbSet<PrintJob> PrintJobs => Set<PrintJob>();
|
||||
public DbSet<FilamentUsage> FilamentUsages => Set<FilamentUsage>();
|
||||
public DbSet<UsageLog> UsageLogs => Set<UsageLog>();
|
||||
|
||||
protected override void OnModelCreating(ModelBuilder modelBuilder)
|
||||
{
|
||||
|
||||
1068
backend/Infrastructure/Data/Migrations/20260426183433_AddFilamentUsageTrackingModel.Designer.cs
generated
Normal file
1068
backend/Infrastructure/Data/Migrations/20260426183433_AddFilamentUsageTrackingModel.Designer.cs
generated
Normal file
File diff suppressed because it is too large
Load Diff
@@ -0,0 +1,533 @@
|
||||
using System;
|
||||
using Microsoft.EntityFrameworkCore.Migrations;
|
||||
|
||||
#nullable disable
|
||||
|
||||
namespace Extrudex.Infrastructure.Data.Migrations
|
||||
{
|
||||
/// <inheritdoc />
|
||||
public partial class AddFilamentUsageTrackingModel : Migration
|
||||
{
|
||||
/// <inheritdoc />
|
||||
protected override void Up(MigrationBuilder migrationBuilder)
|
||||
{
|
||||
migrationBuilder.CreateTable(
|
||||
name: "filament_usages",
|
||||
columns: table => new
|
||||
{
|
||||
id = table.Column<Guid>(type: "uuid", nullable: false),
|
||||
print_job_id = table.Column<Guid>(type: "uuid", nullable: false),
|
||||
spool_id = table.Column<Guid>(type: "uuid", nullable: false),
|
||||
printer_id = table.Column<Guid>(type: "uuid", nullable: false),
|
||||
grams_used = table.Column<decimal>(type: "numeric(10,2)", precision: 10, scale: 2, nullable: false),
|
||||
mm_extruded = table.Column<decimal>(type: "numeric(12,2)", precision: 12, scale: 2, nullable: false),
|
||||
recorded_at = table.Column<DateTime>(type: "timestamp with time zone", nullable: false, defaultValueSql: "now() at time zone 'utc'"),
|
||||
notes = table.Column<string>(type: "character varying(2000)", maxLength: 2000, nullable: true),
|
||||
created_at = table.Column<DateTime>(type: "timestamp with time zone", nullable: false, defaultValueSql: "now() at time zone 'utc'"),
|
||||
updated_at = table.Column<DateTime>(type: "timestamp with time zone", nullable: false, defaultValueSql: "now() at time zone 'utc'")
|
||||
},
|
||||
constraints: table =>
|
||||
{
|
||||
table.PrimaryKey("PK_filament_usages", x => x.id);
|
||||
table.ForeignKey(
|
||||
name: "fk_filament_usages_print_job",
|
||||
column: x => x.print_job_id,
|
||||
principalTable: "print_jobs",
|
||||
principalColumn: "id",
|
||||
onDelete: ReferentialAction.Cascade);
|
||||
table.ForeignKey(
|
||||
name: "fk_filament_usages_printer",
|
||||
column: x => x.printer_id,
|
||||
principalTable: "printers",
|
||||
principalColumn: "id",
|
||||
onDelete: ReferentialAction.Restrict);
|
||||
table.ForeignKey(
|
||||
name: "fk_filament_usages_spool",
|
||||
column: x => x.spool_id,
|
||||
principalTable: "spools",
|
||||
principalColumn: "id",
|
||||
onDelete: ReferentialAction.Restrict);
|
||||
});
|
||||
|
||||
migrationBuilder.UpdateData(
|
||||
table: "material_bases",
|
||||
keyColumn: "id",
|
||||
keyValue: new Guid("10000000-0000-0000-0000-000000000001"),
|
||||
columns: new[] { "created_at", "updated_at" },
|
||||
values: new object[] { new DateTime(2026, 4, 26, 18, 34, 33, 291, DateTimeKind.Utc).AddTicks(9388), new DateTime(2026, 4, 26, 18, 34, 33, 291, DateTimeKind.Utc).AddTicks(9388) });
|
||||
|
||||
migrationBuilder.UpdateData(
|
||||
table: "material_bases",
|
||||
keyColumn: "id",
|
||||
keyValue: new Guid("10000000-0000-0000-0000-000000000002"),
|
||||
columns: new[] { "created_at", "updated_at" },
|
||||
values: new object[] { new DateTime(2026, 4, 26, 18, 34, 33, 291, DateTimeKind.Utc).AddTicks(9871), new DateTime(2026, 4, 26, 18, 34, 33, 291, DateTimeKind.Utc).AddTicks(9871) });
|
||||
|
||||
migrationBuilder.UpdateData(
|
||||
table: "material_bases",
|
||||
keyColumn: "id",
|
||||
keyValue: new Guid("10000000-0000-0000-0000-000000000003"),
|
||||
columns: new[] { "created_at", "updated_at" },
|
||||
values: new object[] { new DateTime(2026, 4, 26, 18, 34, 33, 291, DateTimeKind.Utc).AddTicks(9881), new DateTime(2026, 4, 26, 18, 34, 33, 291, DateTimeKind.Utc).AddTicks(9881) });
|
||||
|
||||
migrationBuilder.UpdateData(
|
||||
table: "material_bases",
|
||||
keyColumn: "id",
|
||||
keyValue: new Guid("10000000-0000-0000-0000-000000000004"),
|
||||
columns: new[] { "created_at", "updated_at" },
|
||||
values: new object[] { new DateTime(2026, 4, 26, 18, 34, 33, 291, DateTimeKind.Utc).AddTicks(9888), new DateTime(2026, 4, 26, 18, 34, 33, 291, DateTimeKind.Utc).AddTicks(9888) });
|
||||
|
||||
migrationBuilder.UpdateData(
|
||||
table: "material_bases",
|
||||
keyColumn: "id",
|
||||
keyValue: new Guid("10000000-0000-0000-0000-000000000005"),
|
||||
columns: new[] { "created_at", "updated_at" },
|
||||
values: new object[] { new DateTime(2026, 4, 26, 18, 34, 33, 291, DateTimeKind.Utc).AddTicks(9895), new DateTime(2026, 4, 26, 18, 34, 33, 291, DateTimeKind.Utc).AddTicks(9895) });
|
||||
|
||||
migrationBuilder.UpdateData(
|
||||
table: "material_bases",
|
||||
keyColumn: "id",
|
||||
keyValue: new Guid("10000000-0000-0000-0000-000000000006"),
|
||||
columns: new[] { "created_at", "updated_at" },
|
||||
values: new object[] { new DateTime(2026, 4, 26, 18, 34, 33, 291, DateTimeKind.Utc).AddTicks(9901), new DateTime(2026, 4, 26, 18, 34, 33, 291, DateTimeKind.Utc).AddTicks(9902) });
|
||||
|
||||
migrationBuilder.UpdateData(
|
||||
table: "material_finishes",
|
||||
keyColumn: "id",
|
||||
keyValue: new Guid("20000000-0000-0000-0000-000000000001"),
|
||||
columns: new[] { "created_at", "updated_at" },
|
||||
values: new object[] { new DateTime(2026, 4, 26, 18, 34, 33, 292, DateTimeKind.Utc).AddTicks(90), new DateTime(2026, 4, 26, 18, 34, 33, 292, DateTimeKind.Utc).AddTicks(90) });
|
||||
|
||||
migrationBuilder.UpdateData(
|
||||
table: "material_finishes",
|
||||
keyColumn: "id",
|
||||
keyValue: new Guid("20000000-0000-0000-0000-000000000002"),
|
||||
columns: new[] { "created_at", "updated_at" },
|
||||
values: new object[] { new DateTime(2026, 4, 26, 18, 34, 33, 292, DateTimeKind.Utc).AddTicks(251), new DateTime(2026, 4, 26, 18, 34, 33, 292, DateTimeKind.Utc).AddTicks(251) });
|
||||
|
||||
migrationBuilder.UpdateData(
|
||||
table: "material_finishes",
|
||||
keyColumn: "id",
|
||||
keyValue: new Guid("20000000-0000-0000-0000-000000000003"),
|
||||
columns: new[] { "created_at", "updated_at" },
|
||||
values: new object[] { new DateTime(2026, 4, 26, 18, 34, 33, 292, DateTimeKind.Utc).AddTicks(259), new DateTime(2026, 4, 26, 18, 34, 33, 292, DateTimeKind.Utc).AddTicks(259) });
|
||||
|
||||
migrationBuilder.UpdateData(
|
||||
table: "material_finishes",
|
||||
keyColumn: "id",
|
||||
keyValue: new Guid("20000000-0000-0000-0000-000000000004"),
|
||||
columns: new[] { "created_at", "updated_at" },
|
||||
values: new object[] { new DateTime(2026, 4, 26, 18, 34, 33, 292, DateTimeKind.Utc).AddTicks(266), new DateTime(2026, 4, 26, 18, 34, 33, 292, DateTimeKind.Utc).AddTicks(266) });
|
||||
|
||||
migrationBuilder.UpdateData(
|
||||
table: "material_finishes",
|
||||
keyColumn: "id",
|
||||
keyValue: new Guid("20000000-0000-0000-0000-000000000005"),
|
||||
columns: new[] { "created_at", "updated_at" },
|
||||
values: new object[] { new DateTime(2026, 4, 26, 18, 34, 33, 292, DateTimeKind.Utc).AddTicks(272), new DateTime(2026, 4, 26, 18, 34, 33, 292, DateTimeKind.Utc).AddTicks(272) });
|
||||
|
||||
migrationBuilder.UpdateData(
|
||||
table: "material_finishes",
|
||||
keyColumn: "id",
|
||||
keyValue: new Guid("20000000-0000-0000-0000-000000000006"),
|
||||
columns: new[] { "created_at", "updated_at" },
|
||||
values: new object[] { new DateTime(2026, 4, 26, 18, 34, 33, 292, DateTimeKind.Utc).AddTicks(278), new DateTime(2026, 4, 26, 18, 34, 33, 292, DateTimeKind.Utc).AddTicks(278) });
|
||||
|
||||
migrationBuilder.UpdateData(
|
||||
table: "material_finishes",
|
||||
keyColumn: "id",
|
||||
keyValue: new Guid("20000000-0000-0000-0000-000000000007"),
|
||||
columns: new[] { "created_at", "updated_at" },
|
||||
values: new object[] { new DateTime(2026, 4, 26, 18, 34, 33, 292, DateTimeKind.Utc).AddTicks(285), new DateTime(2026, 4, 26, 18, 34, 33, 292, DateTimeKind.Utc).AddTicks(285) });
|
||||
|
||||
migrationBuilder.UpdateData(
|
||||
table: "material_finishes",
|
||||
keyColumn: "id",
|
||||
keyValue: new Guid("20000000-0000-0000-0000-000000000008"),
|
||||
columns: new[] { "created_at", "updated_at" },
|
||||
values: new object[] { new DateTime(2026, 4, 26, 18, 34, 33, 292, DateTimeKind.Utc).AddTicks(291), new DateTime(2026, 4, 26, 18, 34, 33, 292, DateTimeKind.Utc).AddTicks(291) });
|
||||
|
||||
migrationBuilder.UpdateData(
|
||||
table: "material_finishes",
|
||||
keyColumn: "id",
|
||||
keyValue: new Guid("20000000-0000-0000-0000-000000000009"),
|
||||
columns: new[] { "created_at", "updated_at" },
|
||||
values: new object[] { new DateTime(2026, 4, 26, 18, 34, 33, 292, DateTimeKind.Utc).AddTicks(297), new DateTime(2026, 4, 26, 18, 34, 33, 292, DateTimeKind.Utc).AddTicks(298) });
|
||||
|
||||
migrationBuilder.UpdateData(
|
||||
table: "material_finishes",
|
||||
keyColumn: "id",
|
||||
keyValue: new Guid("20000000-0000-0000-0000-000000000010"),
|
||||
columns: new[] { "created_at", "updated_at" },
|
||||
values: new object[] { new DateTime(2026, 4, 26, 18, 34, 33, 292, DateTimeKind.Utc).AddTicks(304), new DateTime(2026, 4, 26, 18, 34, 33, 292, DateTimeKind.Utc).AddTicks(304) });
|
||||
|
||||
migrationBuilder.UpdateData(
|
||||
table: "material_finishes",
|
||||
keyColumn: "id",
|
||||
keyValue: new Guid("20000000-0000-0000-0000-000000000011"),
|
||||
columns: new[] { "created_at", "updated_at" },
|
||||
values: new object[] { new DateTime(2026, 4, 26, 18, 34, 33, 292, DateTimeKind.Utc).AddTicks(310), new DateTime(2026, 4, 26, 18, 34, 33, 292, DateTimeKind.Utc).AddTicks(310) });
|
||||
|
||||
migrationBuilder.UpdateData(
|
||||
table: "material_finishes",
|
||||
keyColumn: "id",
|
||||
keyValue: new Guid("20000000-0000-0000-0000-000000000012"),
|
||||
columns: new[] { "created_at", "updated_at" },
|
||||
values: new object[] { new DateTime(2026, 4, 26, 18, 34, 33, 292, DateTimeKind.Utc).AddTicks(316), new DateTime(2026, 4, 26, 18, 34, 33, 292, DateTimeKind.Utc).AddTicks(317) });
|
||||
|
||||
migrationBuilder.UpdateData(
|
||||
table: "material_finishes",
|
||||
keyColumn: "id",
|
||||
keyValue: new Guid("20000000-0000-0000-0000-000000000013"),
|
||||
columns: new[] { "created_at", "updated_at" },
|
||||
values: new object[] { new DateTime(2026, 4, 26, 18, 34, 33, 292, DateTimeKind.Utc).AddTicks(323), new DateTime(2026, 4, 26, 18, 34, 33, 292, DateTimeKind.Utc).AddTicks(323) });
|
||||
|
||||
migrationBuilder.UpdateData(
|
||||
table: "material_finishes",
|
||||
keyColumn: "id",
|
||||
keyValue: new Guid("20000000-0000-0000-0000-000000000014"),
|
||||
columns: new[] { "created_at", "updated_at" },
|
||||
values: new object[] { new DateTime(2026, 4, 26, 18, 34, 33, 292, DateTimeKind.Utc).AddTicks(329), new DateTime(2026, 4, 26, 18, 34, 33, 292, DateTimeKind.Utc).AddTicks(329) });
|
||||
|
||||
migrationBuilder.UpdateData(
|
||||
table: "material_finishes",
|
||||
keyColumn: "id",
|
||||
keyValue: new Guid("20000000-0000-0000-0000-000000000015"),
|
||||
columns: new[] { "created_at", "updated_at" },
|
||||
values: new object[] { new DateTime(2026, 4, 26, 18, 34, 33, 292, DateTimeKind.Utc).AddTicks(336), new DateTime(2026, 4, 26, 18, 34, 33, 292, DateTimeKind.Utc).AddTicks(336) });
|
||||
|
||||
migrationBuilder.UpdateData(
|
||||
table: "material_modifiers",
|
||||
keyColumn: "id",
|
||||
keyValue: new Guid("30000000-0000-0000-0000-000000000001"),
|
||||
columns: new[] { "created_at", "updated_at" },
|
||||
values: new object[] { new DateTime(2026, 4, 26, 18, 34, 33, 292, DateTimeKind.Utc).AddTicks(482), new DateTime(2026, 4, 26, 18, 34, 33, 292, DateTimeKind.Utc).AddTicks(482) });
|
||||
|
||||
migrationBuilder.UpdateData(
|
||||
table: "material_modifiers",
|
||||
keyColumn: "id",
|
||||
keyValue: new Guid("30000000-0000-0000-0000-000000000002"),
|
||||
columns: new[] { "created_at", "updated_at" },
|
||||
values: new object[] { new DateTime(2026, 4, 26, 18, 34, 33, 292, DateTimeKind.Utc).AddTicks(805), new DateTime(2026, 4, 26, 18, 34, 33, 292, DateTimeKind.Utc).AddTicks(806) });
|
||||
|
||||
migrationBuilder.UpdateData(
|
||||
table: "material_modifiers",
|
||||
keyColumn: "id",
|
||||
keyValue: new Guid("30000000-0000-0000-0000-000000000003"),
|
||||
columns: new[] { "created_at", "updated_at" },
|
||||
values: new object[] { new DateTime(2026, 4, 26, 18, 34, 33, 292, DateTimeKind.Utc).AddTicks(815), new DateTime(2026, 4, 26, 18, 34, 33, 292, DateTimeKind.Utc).AddTicks(815) });
|
||||
|
||||
migrationBuilder.UpdateData(
|
||||
table: "material_modifiers",
|
||||
keyColumn: "id",
|
||||
keyValue: new Guid("30000000-0000-0000-0000-000000000004"),
|
||||
columns: new[] { "created_at", "updated_at" },
|
||||
values: new object[] { new DateTime(2026, 4, 26, 18, 34, 33, 292, DateTimeKind.Utc).AddTicks(821), new DateTime(2026, 4, 26, 18, 34, 33, 292, DateTimeKind.Utc).AddTicks(821) });
|
||||
|
||||
migrationBuilder.UpdateData(
|
||||
table: "material_modifiers",
|
||||
keyColumn: "id",
|
||||
keyValue: new Guid("30000000-0000-0000-0000-000000000005"),
|
||||
columns: new[] { "created_at", "updated_at" },
|
||||
values: new object[] { new DateTime(2026, 4, 26, 18, 34, 33, 292, DateTimeKind.Utc).AddTicks(828), new DateTime(2026, 4, 26, 18, 34, 33, 292, DateTimeKind.Utc).AddTicks(828) });
|
||||
|
||||
migrationBuilder.UpdateData(
|
||||
table: "material_modifiers",
|
||||
keyColumn: "id",
|
||||
keyValue: new Guid("30000000-0000-0000-0000-000000000006"),
|
||||
columns: new[] { "created_at", "updated_at" },
|
||||
values: new object[] { new DateTime(2026, 4, 26, 18, 34, 33, 292, DateTimeKind.Utc).AddTicks(834), new DateTime(2026, 4, 26, 18, 34, 33, 292, DateTimeKind.Utc).AddTicks(834) });
|
||||
|
||||
migrationBuilder.UpdateData(
|
||||
table: "material_modifiers",
|
||||
keyColumn: "id",
|
||||
keyValue: new Guid("30000000-0000-0000-0000-000000000007"),
|
||||
columns: new[] { "created_at", "updated_at" },
|
||||
values: new object[] { new DateTime(2026, 4, 26, 18, 34, 33, 292, DateTimeKind.Utc).AddTicks(840), new DateTime(2026, 4, 26, 18, 34, 33, 292, DateTimeKind.Utc).AddTicks(840) });
|
||||
|
||||
migrationBuilder.UpdateData(
|
||||
table: "material_modifiers",
|
||||
keyColumn: "id",
|
||||
keyValue: new Guid("30000000-0000-0000-0000-000000000008"),
|
||||
columns: new[] { "created_at", "updated_at" },
|
||||
values: new object[] { new DateTime(2026, 4, 26, 18, 34, 33, 292, DateTimeKind.Utc).AddTicks(847), new DateTime(2026, 4, 26, 18, 34, 33, 292, DateTimeKind.Utc).AddTicks(847) });
|
||||
|
||||
migrationBuilder.UpdateData(
|
||||
table: "material_modifiers",
|
||||
keyColumn: "id",
|
||||
keyValue: new Guid("30000000-0000-0000-0000-000000000009"),
|
||||
columns: new[] { "created_at", "updated_at" },
|
||||
values: new object[] { new DateTime(2026, 4, 26, 18, 34, 33, 292, DateTimeKind.Utc).AddTicks(853), new DateTime(2026, 4, 26, 18, 34, 33, 292, DateTimeKind.Utc).AddTicks(853) });
|
||||
|
||||
migrationBuilder.UpdateData(
|
||||
table: "material_modifiers",
|
||||
keyColumn: "id",
|
||||
keyValue: new Guid("30000000-0000-0000-0000-000000000010"),
|
||||
columns: new[] { "created_at", "updated_at" },
|
||||
values: new object[] { new DateTime(2026, 4, 26, 18, 34, 33, 292, DateTimeKind.Utc).AddTicks(859), new DateTime(2026, 4, 26, 18, 34, 33, 292, DateTimeKind.Utc).AddTicks(860) });
|
||||
|
||||
migrationBuilder.UpdateData(
|
||||
table: "material_modifiers",
|
||||
keyColumn: "id",
|
||||
keyValue: new Guid("30000000-0000-0000-0000-000000000011"),
|
||||
columns: new[] { "created_at", "updated_at" },
|
||||
values: new object[] { new DateTime(2026, 4, 26, 18, 34, 33, 292, DateTimeKind.Utc).AddTicks(866), new DateTime(2026, 4, 26, 18, 34, 33, 292, DateTimeKind.Utc).AddTicks(866) });
|
||||
|
||||
migrationBuilder.CreateIndex(
|
||||
name: "ix_filament_usages_print_job_id",
|
||||
table: "filament_usages",
|
||||
column: "print_job_id");
|
||||
|
||||
migrationBuilder.CreateIndex(
|
||||
name: "ix_filament_usages_printer_id",
|
||||
table: "filament_usages",
|
||||
column: "printer_id");
|
||||
|
||||
migrationBuilder.CreateIndex(
|
||||
name: "ix_filament_usages_recorded_at",
|
||||
table: "filament_usages",
|
||||
column: "recorded_at");
|
||||
|
||||
migrationBuilder.CreateIndex(
|
||||
name: "ix_filament_usages_spool_id",
|
||||
table: "filament_usages",
|
||||
column: "spool_id");
|
||||
|
||||
migrationBuilder.CreateIndex(
|
||||
name: "ix_filament_usages_spool_id_recorded_at",
|
||||
table: "filament_usages",
|
||||
columns: new[] { "spool_id", "recorded_at" });
|
||||
}
|
||||
|
||||
/// <inheritdoc />
|
||||
protected override void Down(MigrationBuilder migrationBuilder)
|
||||
{
|
||||
migrationBuilder.DropTable(
|
||||
name: "filament_usages");
|
||||
|
||||
migrationBuilder.UpdateData(
|
||||
table: "material_bases",
|
||||
keyColumn: "id",
|
||||
keyValue: new Guid("10000000-0000-0000-0000-000000000001"),
|
||||
columns: new[] { "created_at", "updated_at" },
|
||||
values: new object[] { new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(1096), new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(1096) });
|
||||
|
||||
migrationBuilder.UpdateData(
|
||||
table: "material_bases",
|
||||
keyColumn: "id",
|
||||
keyValue: new Guid("10000000-0000-0000-0000-000000000002"),
|
||||
columns: new[] { "created_at", "updated_at" },
|
||||
values: new object[] { new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(1620), new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(1620) });
|
||||
|
||||
migrationBuilder.UpdateData(
|
||||
table: "material_bases",
|
||||
keyColumn: "id",
|
||||
keyValue: new Guid("10000000-0000-0000-0000-000000000003"),
|
||||
columns: new[] { "created_at", "updated_at" },
|
||||
values: new object[] { new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(1630), new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(1630) });
|
||||
|
||||
migrationBuilder.UpdateData(
|
||||
table: "material_bases",
|
||||
keyColumn: "id",
|
||||
keyValue: new Guid("10000000-0000-0000-0000-000000000004"),
|
||||
columns: new[] { "created_at", "updated_at" },
|
||||
values: new object[] { new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(1638), new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(1638) });
|
||||
|
||||
migrationBuilder.UpdateData(
|
||||
table: "material_bases",
|
||||
keyColumn: "id",
|
||||
keyValue: new Guid("10000000-0000-0000-0000-000000000005"),
|
||||
columns: new[] { "created_at", "updated_at" },
|
||||
values: new object[] { new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(1645), new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(1645) });
|
||||
|
||||
migrationBuilder.UpdateData(
|
||||
table: "material_bases",
|
||||
keyColumn: "id",
|
||||
keyValue: new Guid("10000000-0000-0000-0000-000000000006"),
|
||||
columns: new[] { "created_at", "updated_at" },
|
||||
values: new object[] { new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(1651), new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(1652) });
|
||||
|
||||
migrationBuilder.UpdateData(
|
||||
table: "material_finishes",
|
||||
keyColumn: "id",
|
||||
keyValue: new Guid("20000000-0000-0000-0000-000000000001"),
|
||||
columns: new[] { "created_at", "updated_at" },
|
||||
values: new object[] { new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(1850), new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(1850) });
|
||||
|
||||
migrationBuilder.UpdateData(
|
||||
table: "material_finishes",
|
||||
keyColumn: "id",
|
||||
keyValue: new Guid("20000000-0000-0000-0000-000000000002"),
|
||||
columns: new[] { "created_at", "updated_at" },
|
||||
values: new object[] { new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(2041), new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(2041) });
|
||||
|
||||
migrationBuilder.UpdateData(
|
||||
table: "material_finishes",
|
||||
keyColumn: "id",
|
||||
keyValue: new Guid("20000000-0000-0000-0000-000000000003"),
|
||||
columns: new[] { "created_at", "updated_at" },
|
||||
values: new object[] { new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(2049), new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(2049) });
|
||||
|
||||
migrationBuilder.UpdateData(
|
||||
table: "material_finishes",
|
||||
keyColumn: "id",
|
||||
keyValue: new Guid("20000000-0000-0000-0000-000000000004"),
|
||||
columns: new[] { "created_at", "updated_at" },
|
||||
values: new object[] { new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(2055), new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(2056) });
|
||||
|
||||
migrationBuilder.UpdateData(
|
||||
table: "material_finishes",
|
||||
keyColumn: "id",
|
||||
keyValue: new Guid("20000000-0000-0000-0000-000000000005"),
|
||||
columns: new[] { "created_at", "updated_at" },
|
||||
values: new object[] { new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(2062), new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(2062) });
|
||||
|
||||
migrationBuilder.UpdateData(
|
||||
table: "material_finishes",
|
||||
keyColumn: "id",
|
||||
keyValue: new Guid("20000000-0000-0000-0000-000000000006"),
|
||||
columns: new[] { "created_at", "updated_at" },
|
||||
values: new object[] { new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(2068), new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(2068) });
|
||||
|
||||
migrationBuilder.UpdateData(
|
||||
table: "material_finishes",
|
||||
keyColumn: "id",
|
||||
keyValue: new Guid("20000000-0000-0000-0000-000000000007"),
|
||||
columns: new[] { "created_at", "updated_at" },
|
||||
values: new object[] { new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(2075), new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(2075) });
|
||||
|
||||
migrationBuilder.UpdateData(
|
||||
table: "material_finishes",
|
||||
keyColumn: "id",
|
||||
keyValue: new Guid("20000000-0000-0000-0000-000000000008"),
|
||||
columns: new[] { "created_at", "updated_at" },
|
||||
values: new object[] { new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(2081), new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(2081) });
|
||||
|
||||
migrationBuilder.UpdateData(
|
||||
table: "material_finishes",
|
||||
keyColumn: "id",
|
||||
keyValue: new Guid("20000000-0000-0000-0000-000000000009"),
|
||||
columns: new[] { "created_at", "updated_at" },
|
||||
values: new object[] { new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(2100), new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(2100) });
|
||||
|
||||
migrationBuilder.UpdateData(
|
||||
table: "material_finishes",
|
||||
keyColumn: "id",
|
||||
keyValue: new Guid("20000000-0000-0000-0000-000000000010"),
|
||||
columns: new[] { "created_at", "updated_at" },
|
||||
values: new object[] { new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(2107), new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(2107) });
|
||||
|
||||
migrationBuilder.UpdateData(
|
||||
table: "material_finishes",
|
||||
keyColumn: "id",
|
||||
keyValue: new Guid("20000000-0000-0000-0000-000000000011"),
|
||||
columns: new[] { "created_at", "updated_at" },
|
||||
values: new object[] { new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(2113), new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(2113) });
|
||||
|
||||
migrationBuilder.UpdateData(
|
||||
table: "material_finishes",
|
||||
keyColumn: "id",
|
||||
keyValue: new Guid("20000000-0000-0000-0000-000000000012"),
|
||||
columns: new[] { "created_at", "updated_at" },
|
||||
values: new object[] { new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(2120), new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(2120) });
|
||||
|
||||
migrationBuilder.UpdateData(
|
||||
table: "material_finishes",
|
||||
keyColumn: "id",
|
||||
keyValue: new Guid("20000000-0000-0000-0000-000000000013"),
|
||||
columns: new[] { "created_at", "updated_at" },
|
||||
values: new object[] { new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(2126), new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(2126) });
|
||||
|
||||
migrationBuilder.UpdateData(
|
||||
table: "material_finishes",
|
||||
keyColumn: "id",
|
||||
keyValue: new Guid("20000000-0000-0000-0000-000000000014"),
|
||||
columns: new[] { "created_at", "updated_at" },
|
||||
values: new object[] { new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(2132), new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(2133) });
|
||||
|
||||
migrationBuilder.UpdateData(
|
||||
table: "material_finishes",
|
||||
keyColumn: "id",
|
||||
keyValue: new Guid("20000000-0000-0000-0000-000000000015"),
|
||||
columns: new[] { "created_at", "updated_at" },
|
||||
values: new object[] { new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(2139), new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(2139) });
|
||||
|
||||
migrationBuilder.UpdateData(
|
||||
table: "material_modifiers",
|
||||
keyColumn: "id",
|
||||
keyValue: new Guid("30000000-0000-0000-0000-000000000001"),
|
||||
columns: new[] { "created_at", "updated_at" },
|
||||
values: new object[] { new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(2304), new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(2304) });
|
||||
|
||||
migrationBuilder.UpdateData(
|
||||
table: "material_modifiers",
|
||||
keyColumn: "id",
|
||||
keyValue: new Guid("30000000-0000-0000-0000-000000000002"),
|
||||
columns: new[] { "created_at", "updated_at" },
|
||||
values: new object[] { new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(2463), new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(2463) });
|
||||
|
||||
migrationBuilder.UpdateData(
|
||||
table: "material_modifiers",
|
||||
keyColumn: "id",
|
||||
keyValue: new Guid("30000000-0000-0000-0000-000000000003"),
|
||||
columns: new[] { "created_at", "updated_at" },
|
||||
values: new object[] { new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(2471), new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(2471) });
|
||||
|
||||
migrationBuilder.UpdateData(
|
||||
table: "material_modifiers",
|
||||
keyColumn: "id",
|
||||
keyValue: new Guid("30000000-0000-0000-0000-000000000004"),
|
||||
columns: new[] { "created_at", "updated_at" },
|
||||
values: new object[] { new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(2477), new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(2478) });
|
||||
|
||||
migrationBuilder.UpdateData(
|
||||
table: "material_modifiers",
|
||||
keyColumn: "id",
|
||||
keyValue: new Guid("30000000-0000-0000-0000-000000000005"),
|
||||
columns: new[] { "created_at", "updated_at" },
|
||||
values: new object[] { new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(2484), new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(2484) });
|
||||
|
||||
migrationBuilder.UpdateData(
|
||||
table: "material_modifiers",
|
||||
keyColumn: "id",
|
||||
keyValue: new Guid("30000000-0000-0000-0000-000000000006"),
|
||||
columns: new[] { "created_at", "updated_at" },
|
||||
values: new object[] { new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(2490), new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(2491) });
|
||||
|
||||
migrationBuilder.UpdateData(
|
||||
table: "material_modifiers",
|
||||
keyColumn: "id",
|
||||
keyValue: new Guid("30000000-0000-0000-0000-000000000007"),
|
||||
columns: new[] { "created_at", "updated_at" },
|
||||
values: new object[] { new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(2497), new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(2497) });
|
||||
|
||||
migrationBuilder.UpdateData(
|
||||
table: "material_modifiers",
|
||||
keyColumn: "id",
|
||||
keyValue: new Guid("30000000-0000-0000-0000-000000000008"),
|
||||
columns: new[] { "created_at", "updated_at" },
|
||||
values: new object[] { new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(2503), new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(2503) });
|
||||
|
||||
migrationBuilder.UpdateData(
|
||||
table: "material_modifiers",
|
||||
keyColumn: "id",
|
||||
keyValue: new Guid("30000000-0000-0000-0000-000000000009"),
|
||||
columns: new[] { "created_at", "updated_at" },
|
||||
values: new object[] { new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(2510), new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(2510) });
|
||||
|
||||
migrationBuilder.UpdateData(
|
||||
table: "material_modifiers",
|
||||
keyColumn: "id",
|
||||
keyValue: new Guid("30000000-0000-0000-0000-000000000010"),
|
||||
columns: new[] { "created_at", "updated_at" },
|
||||
values: new object[] { new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(2516), new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(2516) });
|
||||
|
||||
migrationBuilder.UpdateData(
|
||||
table: "material_modifiers",
|
||||
keyColumn: "id",
|
||||
keyValue: new Guid("30000000-0000-0000-0000-000000000011"),
|
||||
columns: new[] { "created_at", "updated_at" },
|
||||
values: new object[] { new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(2522), new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(2523) });
|
||||
}
|
||||
}
|
||||
}
|
||||
1061
backend/Infrastructure/Data/Migrations/20260426184329_AddUsageLogTable.Designer.cs
generated
Normal file
1061
backend/Infrastructure/Data/Migrations/20260426184329_AddUsageLogTable.Designer.cs
generated
Normal file
File diff suppressed because it is too large
Load Diff
@@ -0,0 +1,534 @@
|
||||
using System;
|
||||
using Microsoft.EntityFrameworkCore.Migrations;
|
||||
|
||||
#nullable disable
|
||||
|
||||
namespace Extrudex.Infrastructure.Data.Migrations
|
||||
{
|
||||
/// <inheritdoc />
|
||||
public partial class AddUsageLogTable : Migration
|
||||
{
|
||||
/// <inheritdoc />
|
||||
protected override void Up(MigrationBuilder migrationBuilder)
|
||||
{
|
||||
migrationBuilder.CreateTable(
|
||||
name: "usage_logs",
|
||||
columns: table => new
|
||||
{
|
||||
id = table.Column<Guid>(type: "uuid", nullable: false),
|
||||
spool_id = table.Column<Guid>(type: "uuid", nullable: false),
|
||||
printer_id = table.Column<Guid>(type: "uuid", nullable: true),
|
||||
print_job_id = table.Column<Guid>(type: "uuid", nullable: true),
|
||||
grams_used = table.Column<decimal>(type: "numeric(10,2)", precision: 10, scale: 2, nullable: false),
|
||||
mm_extruded = table.Column<decimal>(type: "numeric(12,2)", precision: 12, scale: 2, nullable: true),
|
||||
usage_timestamp = table.Column<DateTime>(type: "timestamp with time zone", nullable: false),
|
||||
data_source = table.Column<string>(type: "character varying(50)", maxLength: 50, nullable: false),
|
||||
notes = table.Column<string>(type: "character varying(2000)", maxLength: 2000, nullable: true),
|
||||
created_at = table.Column<DateTime>(type: "timestamp with time zone", nullable: false, defaultValueSql: "now() at time zone 'utc'"),
|
||||
updated_at = table.Column<DateTime>(type: "timestamp with time zone", nullable: false, defaultValueSql: "now() at time zone 'utc'")
|
||||
},
|
||||
constraints: table =>
|
||||
{
|
||||
table.PrimaryKey("PK_usage_logs", x => x.id);
|
||||
table.ForeignKey(
|
||||
name: "fk_usage_logs_print_job",
|
||||
column: x => x.print_job_id,
|
||||
principalTable: "print_jobs",
|
||||
principalColumn: "id",
|
||||
onDelete: ReferentialAction.SetNull);
|
||||
table.ForeignKey(
|
||||
name: "fk_usage_logs_printer",
|
||||
column: x => x.printer_id,
|
||||
principalTable: "printers",
|
||||
principalColumn: "id",
|
||||
onDelete: ReferentialAction.SetNull);
|
||||
table.ForeignKey(
|
||||
name: "fk_usage_logs_spool",
|
||||
column: x => x.spool_id,
|
||||
principalTable: "spools",
|
||||
principalColumn: "id",
|
||||
onDelete: ReferentialAction.Restrict);
|
||||
});
|
||||
|
||||
migrationBuilder.UpdateData(
|
||||
table: "material_bases",
|
||||
keyColumn: "id",
|
||||
keyValue: new Guid("10000000-0000-0000-0000-000000000001"),
|
||||
columns: new[] { "created_at", "updated_at" },
|
||||
values: new object[] { new DateTime(2026, 4, 26, 18, 43, 28, 895, DateTimeKind.Utc).AddTicks(6535), new DateTime(2026, 4, 26, 18, 43, 28, 895, DateTimeKind.Utc).AddTicks(6535) });
|
||||
|
||||
migrationBuilder.UpdateData(
|
||||
table: "material_bases",
|
||||
keyColumn: "id",
|
||||
keyValue: new Guid("10000000-0000-0000-0000-000000000002"),
|
||||
columns: new[] { "created_at", "updated_at" },
|
||||
values: new object[] { new DateTime(2026, 4, 26, 18, 43, 28, 895, DateTimeKind.Utc).AddTicks(7016), new DateTime(2026, 4, 26, 18, 43, 28, 895, DateTimeKind.Utc).AddTicks(7016) });
|
||||
|
||||
migrationBuilder.UpdateData(
|
||||
table: "material_bases",
|
||||
keyColumn: "id",
|
||||
keyValue: new Guid("10000000-0000-0000-0000-000000000003"),
|
||||
columns: new[] { "created_at", "updated_at" },
|
||||
values: new object[] { new DateTime(2026, 4, 26, 18, 43, 28, 895, DateTimeKind.Utc).AddTicks(7027), new DateTime(2026, 4, 26, 18, 43, 28, 895, DateTimeKind.Utc).AddTicks(7028) });
|
||||
|
||||
migrationBuilder.UpdateData(
|
||||
table: "material_bases",
|
||||
keyColumn: "id",
|
||||
keyValue: new Guid("10000000-0000-0000-0000-000000000004"),
|
||||
columns: new[] { "created_at", "updated_at" },
|
||||
values: new object[] { new DateTime(2026, 4, 26, 18, 43, 28, 895, DateTimeKind.Utc).AddTicks(7034), new DateTime(2026, 4, 26, 18, 43, 28, 895, DateTimeKind.Utc).AddTicks(7035) });
|
||||
|
||||
migrationBuilder.UpdateData(
|
||||
table: "material_bases",
|
||||
keyColumn: "id",
|
||||
keyValue: new Guid("10000000-0000-0000-0000-000000000005"),
|
||||
columns: new[] { "created_at", "updated_at" },
|
||||
values: new object[] { new DateTime(2026, 4, 26, 18, 43, 28, 895, DateTimeKind.Utc).AddTicks(7042), new DateTime(2026, 4, 26, 18, 43, 28, 895, DateTimeKind.Utc).AddTicks(7042) });
|
||||
|
||||
migrationBuilder.UpdateData(
|
||||
table: "material_bases",
|
||||
keyColumn: "id",
|
||||
keyValue: new Guid("10000000-0000-0000-0000-000000000006"),
|
||||
columns: new[] { "created_at", "updated_at" },
|
||||
values: new object[] { new DateTime(2026, 4, 26, 18, 43, 28, 895, DateTimeKind.Utc).AddTicks(7049), new DateTime(2026, 4, 26, 18, 43, 28, 895, DateTimeKind.Utc).AddTicks(7049) });
|
||||
|
||||
migrationBuilder.UpdateData(
|
||||
table: "material_finishes",
|
||||
keyColumn: "id",
|
||||
keyValue: new Guid("20000000-0000-0000-0000-000000000001"),
|
||||
columns: new[] { "created_at", "updated_at" },
|
||||
values: new object[] { new DateTime(2026, 4, 26, 18, 43, 28, 895, DateTimeKind.Utc).AddTicks(7291), new DateTime(2026, 4, 26, 18, 43, 28, 895, DateTimeKind.Utc).AddTicks(7292) });
|
||||
|
||||
migrationBuilder.UpdateData(
|
||||
table: "material_finishes",
|
||||
keyColumn: "id",
|
||||
keyValue: new Guid("20000000-0000-0000-0000-000000000002"),
|
||||
columns: new[] { "created_at", "updated_at" },
|
||||
values: new object[] { new DateTime(2026, 4, 26, 18, 43, 28, 895, DateTimeKind.Utc).AddTicks(7453), new DateTime(2026, 4, 26, 18, 43, 28, 895, DateTimeKind.Utc).AddTicks(7453) });
|
||||
|
||||
migrationBuilder.UpdateData(
|
||||
table: "material_finishes",
|
||||
keyColumn: "id",
|
||||
keyValue: new Guid("20000000-0000-0000-0000-000000000003"),
|
||||
columns: new[] { "created_at", "updated_at" },
|
||||
values: new object[] { new DateTime(2026, 4, 26, 18, 43, 28, 895, DateTimeKind.Utc).AddTicks(7461), new DateTime(2026, 4, 26, 18, 43, 28, 895, DateTimeKind.Utc).AddTicks(7461) });
|
||||
|
||||
migrationBuilder.UpdateData(
|
||||
table: "material_finishes",
|
||||
keyColumn: "id",
|
||||
keyValue: new Guid("20000000-0000-0000-0000-000000000004"),
|
||||
columns: new[] { "created_at", "updated_at" },
|
||||
values: new object[] { new DateTime(2026, 4, 26, 18, 43, 28, 895, DateTimeKind.Utc).AddTicks(7468), new DateTime(2026, 4, 26, 18, 43, 28, 895, DateTimeKind.Utc).AddTicks(7468) });
|
||||
|
||||
migrationBuilder.UpdateData(
|
||||
table: "material_finishes",
|
||||
keyColumn: "id",
|
||||
keyValue: new Guid("20000000-0000-0000-0000-000000000005"),
|
||||
columns: new[] { "created_at", "updated_at" },
|
||||
values: new object[] { new DateTime(2026, 4, 26, 18, 43, 28, 895, DateTimeKind.Utc).AddTicks(7474), new DateTime(2026, 4, 26, 18, 43, 28, 895, DateTimeKind.Utc).AddTicks(7474) });
|
||||
|
||||
migrationBuilder.UpdateData(
|
||||
table: "material_finishes",
|
||||
keyColumn: "id",
|
||||
keyValue: new Guid("20000000-0000-0000-0000-000000000006"),
|
||||
columns: new[] { "created_at", "updated_at" },
|
||||
values: new object[] { new DateTime(2026, 4, 26, 18, 43, 28, 895, DateTimeKind.Utc).AddTicks(7480), new DateTime(2026, 4, 26, 18, 43, 28, 895, DateTimeKind.Utc).AddTicks(7481) });
|
||||
|
||||
migrationBuilder.UpdateData(
|
||||
table: "material_finishes",
|
||||
keyColumn: "id",
|
||||
keyValue: new Guid("20000000-0000-0000-0000-000000000007"),
|
||||
columns: new[] { "created_at", "updated_at" },
|
||||
values: new object[] { new DateTime(2026, 4, 26, 18, 43, 28, 895, DateTimeKind.Utc).AddTicks(7487), new DateTime(2026, 4, 26, 18, 43, 28, 895, DateTimeKind.Utc).AddTicks(7487) });
|
||||
|
||||
migrationBuilder.UpdateData(
|
||||
table: "material_finishes",
|
||||
keyColumn: "id",
|
||||
keyValue: new Guid("20000000-0000-0000-0000-000000000008"),
|
||||
columns: new[] { "created_at", "updated_at" },
|
||||
values: new object[] { new DateTime(2026, 4, 26, 18, 43, 28, 895, DateTimeKind.Utc).AddTicks(7493), new DateTime(2026, 4, 26, 18, 43, 28, 895, DateTimeKind.Utc).AddTicks(7493) });
|
||||
|
||||
migrationBuilder.UpdateData(
|
||||
table: "material_finishes",
|
||||
keyColumn: "id",
|
||||
keyValue: new Guid("20000000-0000-0000-0000-000000000009"),
|
||||
columns: new[] { "created_at", "updated_at" },
|
||||
values: new object[] { new DateTime(2026, 4, 26, 18, 43, 28, 895, DateTimeKind.Utc).AddTicks(7500), new DateTime(2026, 4, 26, 18, 43, 28, 895, DateTimeKind.Utc).AddTicks(7500) });
|
||||
|
||||
migrationBuilder.UpdateData(
|
||||
table: "material_finishes",
|
||||
keyColumn: "id",
|
||||
keyValue: new Guid("20000000-0000-0000-0000-000000000010"),
|
||||
columns: new[] { "created_at", "updated_at" },
|
||||
values: new object[] { new DateTime(2026, 4, 26, 18, 43, 28, 895, DateTimeKind.Utc).AddTicks(7507), new DateTime(2026, 4, 26, 18, 43, 28, 895, DateTimeKind.Utc).AddTicks(7507) });
|
||||
|
||||
migrationBuilder.UpdateData(
|
||||
table: "material_finishes",
|
||||
keyColumn: "id",
|
||||
keyValue: new Guid("20000000-0000-0000-0000-000000000011"),
|
||||
columns: new[] { "created_at", "updated_at" },
|
||||
values: new object[] { new DateTime(2026, 4, 26, 18, 43, 28, 895, DateTimeKind.Utc).AddTicks(7513), new DateTime(2026, 4, 26, 18, 43, 28, 895, DateTimeKind.Utc).AddTicks(7513) });
|
||||
|
||||
migrationBuilder.UpdateData(
|
||||
table: "material_finishes",
|
||||
keyColumn: "id",
|
||||
keyValue: new Guid("20000000-0000-0000-0000-000000000012"),
|
||||
columns: new[] { "created_at", "updated_at" },
|
||||
values: new object[] { new DateTime(2026, 4, 26, 18, 43, 28, 895, DateTimeKind.Utc).AddTicks(7519), new DateTime(2026, 4, 26, 18, 43, 28, 895, DateTimeKind.Utc).AddTicks(7520) });
|
||||
|
||||
migrationBuilder.UpdateData(
|
||||
table: "material_finishes",
|
||||
keyColumn: "id",
|
||||
keyValue: new Guid("20000000-0000-0000-0000-000000000013"),
|
||||
columns: new[] { "created_at", "updated_at" },
|
||||
values: new object[] { new DateTime(2026, 4, 26, 18, 43, 28, 895, DateTimeKind.Utc).AddTicks(7526), new DateTime(2026, 4, 26, 18, 43, 28, 895, DateTimeKind.Utc).AddTicks(7526) });
|
||||
|
||||
migrationBuilder.UpdateData(
|
||||
table: "material_finishes",
|
||||
keyColumn: "id",
|
||||
keyValue: new Guid("20000000-0000-0000-0000-000000000014"),
|
||||
columns: new[] { "created_at", "updated_at" },
|
||||
values: new object[] { new DateTime(2026, 4, 26, 18, 43, 28, 895, DateTimeKind.Utc).AddTicks(7532), new DateTime(2026, 4, 26, 18, 43, 28, 895, DateTimeKind.Utc).AddTicks(7532) });
|
||||
|
||||
migrationBuilder.UpdateData(
|
||||
table: "material_finishes",
|
||||
keyColumn: "id",
|
||||
keyValue: new Guid("20000000-0000-0000-0000-000000000015"),
|
||||
columns: new[] { "created_at", "updated_at" },
|
||||
values: new object[] { new DateTime(2026, 4, 26, 18, 43, 28, 895, DateTimeKind.Utc).AddTicks(7538), new DateTime(2026, 4, 26, 18, 43, 28, 895, DateTimeKind.Utc).AddTicks(7539) });
|
||||
|
||||
migrationBuilder.UpdateData(
|
||||
table: "material_modifiers",
|
||||
keyColumn: "id",
|
||||
keyValue: new Guid("30000000-0000-0000-0000-000000000001"),
|
||||
columns: new[] { "created_at", "updated_at" },
|
||||
values: new object[] { new DateTime(2026, 4, 26, 18, 43, 28, 895, DateTimeKind.Utc).AddTicks(7690), new DateTime(2026, 4, 26, 18, 43, 28, 895, DateTimeKind.Utc).AddTicks(7690) });
|
||||
|
||||
migrationBuilder.UpdateData(
|
||||
table: "material_modifiers",
|
||||
keyColumn: "id",
|
||||
keyValue: new Guid("30000000-0000-0000-0000-000000000002"),
|
||||
columns: new[] { "created_at", "updated_at" },
|
||||
values: new object[] { new DateTime(2026, 4, 26, 18, 43, 28, 895, DateTimeKind.Utc).AddTicks(7838), new DateTime(2026, 4, 26, 18, 43, 28, 895, DateTimeKind.Utc).AddTicks(7838) });
|
||||
|
||||
migrationBuilder.UpdateData(
|
||||
table: "material_modifiers",
|
||||
keyColumn: "id",
|
||||
keyValue: new Guid("30000000-0000-0000-0000-000000000003"),
|
||||
columns: new[] { "created_at", "updated_at" },
|
||||
values: new object[] { new DateTime(2026, 4, 26, 18, 43, 28, 895, DateTimeKind.Utc).AddTicks(7846), new DateTime(2026, 4, 26, 18, 43, 28, 895, DateTimeKind.Utc).AddTicks(7846) });
|
||||
|
||||
migrationBuilder.UpdateData(
|
||||
table: "material_modifiers",
|
||||
keyColumn: "id",
|
||||
keyValue: new Guid("30000000-0000-0000-0000-000000000004"),
|
||||
columns: new[] { "created_at", "updated_at" },
|
||||
values: new object[] { new DateTime(2026, 4, 26, 18, 43, 28, 895, DateTimeKind.Utc).AddTicks(7853), new DateTime(2026, 4, 26, 18, 43, 28, 895, DateTimeKind.Utc).AddTicks(7853) });
|
||||
|
||||
migrationBuilder.UpdateData(
|
||||
table: "material_modifiers",
|
||||
keyColumn: "id",
|
||||
keyValue: new Guid("30000000-0000-0000-0000-000000000005"),
|
||||
columns: new[] { "created_at", "updated_at" },
|
||||
values: new object[] { new DateTime(2026, 4, 26, 18, 43, 28, 895, DateTimeKind.Utc).AddTicks(7859), new DateTime(2026, 4, 26, 18, 43, 28, 895, DateTimeKind.Utc).AddTicks(7859) });
|
||||
|
||||
migrationBuilder.UpdateData(
|
||||
table: "material_modifiers",
|
||||
keyColumn: "id",
|
||||
keyValue: new Guid("30000000-0000-0000-0000-000000000006"),
|
||||
columns: new[] { "created_at", "updated_at" },
|
||||
values: new object[] { new DateTime(2026, 4, 26, 18, 43, 28, 895, DateTimeKind.Utc).AddTicks(7865), new DateTime(2026, 4, 26, 18, 43, 28, 895, DateTimeKind.Utc).AddTicks(7866) });
|
||||
|
||||
migrationBuilder.UpdateData(
|
||||
table: "material_modifiers",
|
||||
keyColumn: "id",
|
||||
keyValue: new Guid("30000000-0000-0000-0000-000000000007"),
|
||||
columns: new[] { "created_at", "updated_at" },
|
||||
values: new object[] { new DateTime(2026, 4, 26, 18, 43, 28, 895, DateTimeKind.Utc).AddTicks(7872), new DateTime(2026, 4, 26, 18, 43, 28, 895, DateTimeKind.Utc).AddTicks(7872) });
|
||||
|
||||
migrationBuilder.UpdateData(
|
||||
table: "material_modifiers",
|
||||
keyColumn: "id",
|
||||
keyValue: new Guid("30000000-0000-0000-0000-000000000008"),
|
||||
columns: new[] { "created_at", "updated_at" },
|
||||
values: new object[] { new DateTime(2026, 4, 26, 18, 43, 28, 895, DateTimeKind.Utc).AddTicks(7878), new DateTime(2026, 4, 26, 18, 43, 28, 895, DateTimeKind.Utc).AddTicks(7879) });
|
||||
|
||||
migrationBuilder.UpdateData(
|
||||
table: "material_modifiers",
|
||||
keyColumn: "id",
|
||||
keyValue: new Guid("30000000-0000-0000-0000-000000000009"),
|
||||
columns: new[] { "created_at", "updated_at" },
|
||||
values: new object[] { new DateTime(2026, 4, 26, 18, 43, 28, 895, DateTimeKind.Utc).AddTicks(7885), new DateTime(2026, 4, 26, 18, 43, 28, 895, DateTimeKind.Utc).AddTicks(7885) });
|
||||
|
||||
migrationBuilder.UpdateData(
|
||||
table: "material_modifiers",
|
||||
keyColumn: "id",
|
||||
keyValue: new Guid("30000000-0000-0000-0000-000000000010"),
|
||||
columns: new[] { "created_at", "updated_at" },
|
||||
values: new object[] { new DateTime(2026, 4, 26, 18, 43, 28, 895, DateTimeKind.Utc).AddTicks(7891), new DateTime(2026, 4, 26, 18, 43, 28, 895, DateTimeKind.Utc).AddTicks(7891) });
|
||||
|
||||
migrationBuilder.UpdateData(
|
||||
table: "material_modifiers",
|
||||
keyColumn: "id",
|
||||
keyValue: new Guid("30000000-0000-0000-0000-000000000011"),
|
||||
columns: new[] { "created_at", "updated_at" },
|
||||
values: new object[] { new DateTime(2026, 4, 26, 18, 43, 28, 895, DateTimeKind.Utc).AddTicks(7898), new DateTime(2026, 4, 26, 18, 43, 28, 895, DateTimeKind.Utc).AddTicks(7898) });
|
||||
|
||||
migrationBuilder.CreateIndex(
|
||||
name: "ix_usage_logs_data_source",
|
||||
table: "usage_logs",
|
||||
column: "data_source");
|
||||
|
||||
migrationBuilder.CreateIndex(
|
||||
name: "ix_usage_logs_print_job_id",
|
||||
table: "usage_logs",
|
||||
column: "print_job_id");
|
||||
|
||||
migrationBuilder.CreateIndex(
|
||||
name: "ix_usage_logs_printer_id",
|
||||
table: "usage_logs",
|
||||
column: "printer_id");
|
||||
|
||||
migrationBuilder.CreateIndex(
|
||||
name: "ix_usage_logs_spool_id",
|
||||
table: "usage_logs",
|
||||
column: "spool_id");
|
||||
|
||||
migrationBuilder.CreateIndex(
|
||||
name: "ix_usage_logs_usage_timestamp",
|
||||
table: "usage_logs",
|
||||
column: "usage_timestamp");
|
||||
}
|
||||
|
||||
/// <inheritdoc />
|
||||
protected override void Down(MigrationBuilder migrationBuilder)
|
||||
{
|
||||
migrationBuilder.DropTable(
|
||||
name: "usage_logs");
|
||||
|
||||
migrationBuilder.UpdateData(
|
||||
table: "material_bases",
|
||||
keyColumn: "id",
|
||||
keyValue: new Guid("10000000-0000-0000-0000-000000000001"),
|
||||
columns: new[] { "created_at", "updated_at" },
|
||||
values: new object[] { new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(1096), new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(1096) });
|
||||
|
||||
migrationBuilder.UpdateData(
|
||||
table: "material_bases",
|
||||
keyColumn: "id",
|
||||
keyValue: new Guid("10000000-0000-0000-0000-000000000002"),
|
||||
columns: new[] { "created_at", "updated_at" },
|
||||
values: new object[] { new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(1620), new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(1620) });
|
||||
|
||||
migrationBuilder.UpdateData(
|
||||
table: "material_bases",
|
||||
keyColumn: "id",
|
||||
keyValue: new Guid("10000000-0000-0000-0000-000000000003"),
|
||||
columns: new[] { "created_at", "updated_at" },
|
||||
values: new object[] { new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(1630), new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(1630) });
|
||||
|
||||
migrationBuilder.UpdateData(
|
||||
table: "material_bases",
|
||||
keyColumn: "id",
|
||||
keyValue: new Guid("10000000-0000-0000-0000-000000000004"),
|
||||
columns: new[] { "created_at", "updated_at" },
|
||||
values: new object[] { new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(1638), new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(1638) });
|
||||
|
||||
migrationBuilder.UpdateData(
|
||||
table: "material_bases",
|
||||
keyColumn: "id",
|
||||
keyValue: new Guid("10000000-0000-0000-0000-000000000005"),
|
||||
columns: new[] { "created_at", "updated_at" },
|
||||
values: new object[] { new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(1645), new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(1645) });
|
||||
|
||||
migrationBuilder.UpdateData(
|
||||
table: "material_bases",
|
||||
keyColumn: "id",
|
||||
keyValue: new Guid("10000000-0000-0000-0000-000000000006"),
|
||||
columns: new[] { "created_at", "updated_at" },
|
||||
values: new object[] { new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(1651), new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(1652) });
|
||||
|
||||
migrationBuilder.UpdateData(
|
||||
table: "material_finishes",
|
||||
keyColumn: "id",
|
||||
keyValue: new Guid("20000000-0000-0000-0000-000000000001"),
|
||||
columns: new[] { "created_at", "updated_at" },
|
||||
values: new object[] { new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(1850), new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(1850) });
|
||||
|
||||
migrationBuilder.UpdateData(
|
||||
table: "material_finishes",
|
||||
keyColumn: "id",
|
||||
keyValue: new Guid("20000000-0000-0000-0000-000000000002"),
|
||||
columns: new[] { "created_at", "updated_at" },
|
||||
values: new object[] { new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(2041), new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(2041) });
|
||||
|
||||
migrationBuilder.UpdateData(
|
||||
table: "material_finishes",
|
||||
keyColumn: "id",
|
||||
keyValue: new Guid("20000000-0000-0000-0000-000000000003"),
|
||||
columns: new[] { "created_at", "updated_at" },
|
||||
values: new object[] { new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(2049), new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(2049) });
|
||||
|
||||
migrationBuilder.UpdateData(
|
||||
table: "material_finishes",
|
||||
keyColumn: "id",
|
||||
keyValue: new Guid("20000000-0000-0000-0000-000000000004"),
|
||||
columns: new[] { "created_at", "updated_at" },
|
||||
values: new object[] { new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(2055), new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(2056) });
|
||||
|
||||
migrationBuilder.UpdateData(
|
||||
table: "material_finishes",
|
||||
keyColumn: "id",
|
||||
keyValue: new Guid("20000000-0000-0000-0000-000000000005"),
|
||||
columns: new[] { "created_at", "updated_at" },
|
||||
values: new object[] { new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(2062), new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(2062) });
|
||||
|
||||
migrationBuilder.UpdateData(
|
||||
table: "material_finishes",
|
||||
keyColumn: "id",
|
||||
keyValue: new Guid("20000000-0000-0000-0000-000000000006"),
|
||||
columns: new[] { "created_at", "updated_at" },
|
||||
values: new object[] { new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(2068), new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(2068) });
|
||||
|
||||
migrationBuilder.UpdateData(
|
||||
table: "material_finishes",
|
||||
keyColumn: "id",
|
||||
keyValue: new Guid("20000000-0000-0000-0000-000000000007"),
|
||||
columns: new[] { "created_at", "updated_at" },
|
||||
values: new object[] { new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(2075), new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(2075) });
|
||||
|
||||
migrationBuilder.UpdateData(
|
||||
table: "material_finishes",
|
||||
keyColumn: "id",
|
||||
keyValue: new Guid("20000000-0000-0000-0000-000000000008"),
|
||||
columns: new[] { "created_at", "updated_at" },
|
||||
values: new object[] { new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(2081), new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(2081) });
|
||||
|
||||
migrationBuilder.UpdateData(
|
||||
table: "material_finishes",
|
||||
keyColumn: "id",
|
||||
keyValue: new Guid("20000000-0000-0000-0000-000000000009"),
|
||||
columns: new[] { "created_at", "updated_at" },
|
||||
values: new object[] { new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(2100), new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(2100) });
|
||||
|
||||
migrationBuilder.UpdateData(
|
||||
table: "material_finishes",
|
||||
keyColumn: "id",
|
||||
keyValue: new Guid("20000000-0000-0000-0000-000000000010"),
|
||||
columns: new[] { "created_at", "updated_at" },
|
||||
values: new object[] { new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(2107), new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(2107) });
|
||||
|
||||
migrationBuilder.UpdateData(
|
||||
table: "material_finishes",
|
||||
keyColumn: "id",
|
||||
keyValue: new Guid("20000000-0000-0000-0000-000000000011"),
|
||||
columns: new[] { "created_at", "updated_at" },
|
||||
values: new object[] { new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(2113), new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(2113) });
|
||||
|
||||
migrationBuilder.UpdateData(
|
||||
table: "material_finishes",
|
||||
keyColumn: "id",
|
||||
keyValue: new Guid("20000000-0000-0000-0000-000000000012"),
|
||||
columns: new[] { "created_at", "updated_at" },
|
||||
values: new object[] { new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(2120), new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(2120) });
|
||||
|
||||
migrationBuilder.UpdateData(
|
||||
table: "material_finishes",
|
||||
keyColumn: "id",
|
||||
keyValue: new Guid("20000000-0000-0000-0000-000000000013"),
|
||||
columns: new[] { "created_at", "updated_at" },
|
||||
values: new object[] { new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(2126), new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(2126) });
|
||||
|
||||
migrationBuilder.UpdateData(
|
||||
table: "material_finishes",
|
||||
keyColumn: "id",
|
||||
keyValue: new Guid("20000000-0000-0000-0000-000000000014"),
|
||||
columns: new[] { "created_at", "updated_at" },
|
||||
values: new object[] { new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(2132), new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(2133) });
|
||||
|
||||
migrationBuilder.UpdateData(
|
||||
table: "material_finishes",
|
||||
keyColumn: "id",
|
||||
keyValue: new Guid("20000000-0000-0000-0000-000000000015"),
|
||||
columns: new[] { "created_at", "updated_at" },
|
||||
values: new object[] { new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(2139), new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(2139) });
|
||||
|
||||
migrationBuilder.UpdateData(
|
||||
table: "material_modifiers",
|
||||
keyColumn: "id",
|
||||
keyValue: new Guid("30000000-0000-0000-0000-000000000001"),
|
||||
columns: new[] { "created_at", "updated_at" },
|
||||
values: new object[] { new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(2304), new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(2304) });
|
||||
|
||||
migrationBuilder.UpdateData(
|
||||
table: "material_modifiers",
|
||||
keyColumn: "id",
|
||||
keyValue: new Guid("30000000-0000-0000-0000-000000000002"),
|
||||
columns: new[] { "created_at", "updated_at" },
|
||||
values: new object[] { new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(2463), new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(2463) });
|
||||
|
||||
migrationBuilder.UpdateData(
|
||||
table: "material_modifiers",
|
||||
keyColumn: "id",
|
||||
keyValue: new Guid("30000000-0000-0000-0000-000000000003"),
|
||||
columns: new[] { "created_at", "updated_at" },
|
||||
values: new object[] { new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(2471), new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(2471) });
|
||||
|
||||
migrationBuilder.UpdateData(
|
||||
table: "material_modifiers",
|
||||
keyColumn: "id",
|
||||
keyValue: new Guid("30000000-0000-0000-0000-000000000004"),
|
||||
columns: new[] { "created_at", "updated_at" },
|
||||
values: new object[] { new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(2477), new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(2478) });
|
||||
|
||||
migrationBuilder.UpdateData(
|
||||
table: "material_modifiers",
|
||||
keyColumn: "id",
|
||||
keyValue: new Guid("30000000-0000-0000-0000-000000000005"),
|
||||
columns: new[] { "created_at", "updated_at" },
|
||||
values: new object[] { new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(2484), new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(2484) });
|
||||
|
||||
migrationBuilder.UpdateData(
|
||||
table: "material_modifiers",
|
||||
keyColumn: "id",
|
||||
keyValue: new Guid("30000000-0000-0000-0000-000000000006"),
|
||||
columns: new[] { "created_at", "updated_at" },
|
||||
values: new object[] { new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(2490), new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(2491) });
|
||||
|
||||
migrationBuilder.UpdateData(
|
||||
table: "material_modifiers",
|
||||
keyColumn: "id",
|
||||
keyValue: new Guid("30000000-0000-0000-0000-000000000007"),
|
||||
columns: new[] { "created_at", "updated_at" },
|
||||
values: new object[] { new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(2497), new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(2497) });
|
||||
|
||||
migrationBuilder.UpdateData(
|
||||
table: "material_modifiers",
|
||||
keyColumn: "id",
|
||||
keyValue: new Guid("30000000-0000-0000-0000-000000000008"),
|
||||
columns: new[] { "created_at", "updated_at" },
|
||||
values: new object[] { new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(2503), new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(2503) });
|
||||
|
||||
migrationBuilder.UpdateData(
|
||||
table: "material_modifiers",
|
||||
keyColumn: "id",
|
||||
keyValue: new Guid("30000000-0000-0000-0000-000000000009"),
|
||||
columns: new[] { "created_at", "updated_at" },
|
||||
values: new object[] { new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(2510), new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(2510) });
|
||||
|
||||
migrationBuilder.UpdateData(
|
||||
table: "material_modifiers",
|
||||
keyColumn: "id",
|
||||
keyValue: new Guid("30000000-0000-0000-0000-000000000010"),
|
||||
columns: new[] { "created_at", "updated_at" },
|
||||
values: new object[] { new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(2516), new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(2516) });
|
||||
|
||||
migrationBuilder.UpdateData(
|
||||
table: "material_modifiers",
|
||||
keyColumn: "id",
|
||||
keyValue: new Guid("30000000-0000-0000-0000-000000000011"),
|
||||
columns: new[] { "created_at", "updated_at" },
|
||||
values: new object[] { new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(2522), new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(2523) });
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -145,50 +145,50 @@ namespace Extrudex.Infrastructure.Data.Migrations
|
||||
new
|
||||
{
|
||||
Id = new Guid("10000000-0000-0000-0000-000000000001"),
|
||||
CreatedAt = new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(1096),
|
||||
CreatedAt = new DateTime(2026, 4, 26, 18, 43, 28, 895, DateTimeKind.Utc).AddTicks(6535),
|
||||
DensityGperCm3 = 1.24m,
|
||||
Name = "PLA",
|
||||
UpdatedAt = new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(1096)
|
||||
UpdatedAt = new DateTime(2026, 4, 26, 18, 43, 28, 895, DateTimeKind.Utc).AddTicks(6535)
|
||||
},
|
||||
new
|
||||
{
|
||||
Id = new Guid("10000000-0000-0000-0000-000000000002"),
|
||||
CreatedAt = new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(1620),
|
||||
CreatedAt = new DateTime(2026, 4, 26, 18, 43, 28, 895, DateTimeKind.Utc).AddTicks(7016),
|
||||
DensityGperCm3 = 1.27m,
|
||||
Name = "PETG",
|
||||
UpdatedAt = new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(1620)
|
||||
UpdatedAt = new DateTime(2026, 4, 26, 18, 43, 28, 895, DateTimeKind.Utc).AddTicks(7016)
|
||||
},
|
||||
new
|
||||
{
|
||||
Id = new Guid("10000000-0000-0000-0000-000000000003"),
|
||||
CreatedAt = new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(1630),
|
||||
CreatedAt = new DateTime(2026, 4, 26, 18, 43, 28, 895, DateTimeKind.Utc).AddTicks(7027),
|
||||
DensityGperCm3 = 1.04m,
|
||||
Name = "ABS",
|
||||
UpdatedAt = new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(1630)
|
||||
UpdatedAt = new DateTime(2026, 4, 26, 18, 43, 28, 895, DateTimeKind.Utc).AddTicks(7028)
|
||||
},
|
||||
new
|
||||
{
|
||||
Id = new Guid("10000000-0000-0000-0000-000000000004"),
|
||||
CreatedAt = new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(1638),
|
||||
CreatedAt = new DateTime(2026, 4, 26, 18, 43, 28, 895, DateTimeKind.Utc).AddTicks(7034),
|
||||
DensityGperCm3 = 1.07m,
|
||||
Name = "ASA",
|
||||
UpdatedAt = new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(1638)
|
||||
UpdatedAt = new DateTime(2026, 4, 26, 18, 43, 28, 895, DateTimeKind.Utc).AddTicks(7035)
|
||||
},
|
||||
new
|
||||
{
|
||||
Id = new Guid("10000000-0000-0000-0000-000000000005"),
|
||||
CreatedAt = new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(1645),
|
||||
CreatedAt = new DateTime(2026, 4, 26, 18, 43, 28, 895, DateTimeKind.Utc).AddTicks(7042),
|
||||
DensityGperCm3 = 1.21m,
|
||||
Name = "TPU",
|
||||
UpdatedAt = new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(1645)
|
||||
UpdatedAt = new DateTime(2026, 4, 26, 18, 43, 28, 895, DateTimeKind.Utc).AddTicks(7042)
|
||||
},
|
||||
new
|
||||
{
|
||||
Id = new Guid("10000000-0000-0000-0000-000000000006"),
|
||||
CreatedAt = new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(1651),
|
||||
CreatedAt = new DateTime(2026, 4, 26, 18, 43, 28, 895, DateTimeKind.Utc).AddTicks(7049),
|
||||
DensityGperCm3 = 1.14m,
|
||||
Name = "Nylon",
|
||||
UpdatedAt = new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(1652)
|
||||
UpdatedAt = new DateTime(2026, 4, 26, 18, 43, 28, 895, DateTimeKind.Utc).AddTicks(7049)
|
||||
});
|
||||
});
|
||||
|
||||
@@ -232,122 +232,122 @@ namespace Extrudex.Infrastructure.Data.Migrations
|
||||
new
|
||||
{
|
||||
Id = new Guid("20000000-0000-0000-0000-000000000001"),
|
||||
CreatedAt = new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(1850),
|
||||
CreatedAt = new DateTime(2026, 4, 26, 18, 43, 28, 895, DateTimeKind.Utc).AddTicks(7291),
|
||||
MaterialBaseId = new Guid("10000000-0000-0000-0000-000000000001"),
|
||||
Name = "Basic",
|
||||
UpdatedAt = new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(1850)
|
||||
UpdatedAt = new DateTime(2026, 4, 26, 18, 43, 28, 895, DateTimeKind.Utc).AddTicks(7292)
|
||||
},
|
||||
new
|
||||
{
|
||||
Id = new Guid("20000000-0000-0000-0000-000000000002"),
|
||||
CreatedAt = new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(2041),
|
||||
CreatedAt = new DateTime(2026, 4, 26, 18, 43, 28, 895, DateTimeKind.Utc).AddTicks(7453),
|
||||
MaterialBaseId = new Guid("10000000-0000-0000-0000-000000000001"),
|
||||
Name = "Matte",
|
||||
UpdatedAt = new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(2041)
|
||||
UpdatedAt = new DateTime(2026, 4, 26, 18, 43, 28, 895, DateTimeKind.Utc).AddTicks(7453)
|
||||
},
|
||||
new
|
||||
{
|
||||
Id = new Guid("20000000-0000-0000-0000-000000000003"),
|
||||
CreatedAt = new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(2049),
|
||||
CreatedAt = new DateTime(2026, 4, 26, 18, 43, 28, 895, DateTimeKind.Utc).AddTicks(7461),
|
||||
MaterialBaseId = new Guid("10000000-0000-0000-0000-000000000001"),
|
||||
Name = "Silk",
|
||||
UpdatedAt = new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(2049)
|
||||
UpdatedAt = new DateTime(2026, 4, 26, 18, 43, 28, 895, DateTimeKind.Utc).AddTicks(7461)
|
||||
},
|
||||
new
|
||||
{
|
||||
Id = new Guid("20000000-0000-0000-0000-000000000004"),
|
||||
CreatedAt = new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(2055),
|
||||
CreatedAt = new DateTime(2026, 4, 26, 18, 43, 28, 895, DateTimeKind.Utc).AddTicks(7468),
|
||||
MaterialBaseId = new Guid("10000000-0000-0000-0000-000000000001"),
|
||||
Name = "Glitter",
|
||||
UpdatedAt = new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(2056)
|
||||
UpdatedAt = new DateTime(2026, 4, 26, 18, 43, 28, 895, DateTimeKind.Utc).AddTicks(7468)
|
||||
},
|
||||
new
|
||||
{
|
||||
Id = new Guid("20000000-0000-0000-0000-000000000005"),
|
||||
CreatedAt = new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(2062),
|
||||
CreatedAt = new DateTime(2026, 4, 26, 18, 43, 28, 895, DateTimeKind.Utc).AddTicks(7474),
|
||||
MaterialBaseId = new Guid("10000000-0000-0000-0000-000000000001"),
|
||||
Name = "Marble",
|
||||
UpdatedAt = new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(2062)
|
||||
UpdatedAt = new DateTime(2026, 4, 26, 18, 43, 28, 895, DateTimeKind.Utc).AddTicks(7474)
|
||||
},
|
||||
new
|
||||
{
|
||||
Id = new Guid("20000000-0000-0000-0000-000000000006"),
|
||||
CreatedAt = new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(2068),
|
||||
CreatedAt = new DateTime(2026, 4, 26, 18, 43, 28, 895, DateTimeKind.Utc).AddTicks(7480),
|
||||
MaterialBaseId = new Guid("10000000-0000-0000-0000-000000000001"),
|
||||
Name = "Sparkle",
|
||||
UpdatedAt = new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(2068)
|
||||
UpdatedAt = new DateTime(2026, 4, 26, 18, 43, 28, 895, DateTimeKind.Utc).AddTicks(7481)
|
||||
},
|
||||
new
|
||||
{
|
||||
Id = new Guid("20000000-0000-0000-0000-000000000007"),
|
||||
CreatedAt = new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(2075),
|
||||
CreatedAt = new DateTime(2026, 4, 26, 18, 43, 28, 895, DateTimeKind.Utc).AddTicks(7487),
|
||||
MaterialBaseId = new Guid("10000000-0000-0000-0000-000000000002"),
|
||||
Name = "Basic",
|
||||
UpdatedAt = new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(2075)
|
||||
UpdatedAt = new DateTime(2026, 4, 26, 18, 43, 28, 895, DateTimeKind.Utc).AddTicks(7487)
|
||||
},
|
||||
new
|
||||
{
|
||||
Id = new Guid("20000000-0000-0000-0000-000000000008"),
|
||||
CreatedAt = new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(2081),
|
||||
CreatedAt = new DateTime(2026, 4, 26, 18, 43, 28, 895, DateTimeKind.Utc).AddTicks(7493),
|
||||
MaterialBaseId = new Guid("10000000-0000-0000-0000-000000000002"),
|
||||
Name = "Matte",
|
||||
UpdatedAt = new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(2081)
|
||||
UpdatedAt = new DateTime(2026, 4, 26, 18, 43, 28, 895, DateTimeKind.Utc).AddTicks(7493)
|
||||
},
|
||||
new
|
||||
{
|
||||
Id = new Guid("20000000-0000-0000-0000-000000000009"),
|
||||
CreatedAt = new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(2100),
|
||||
CreatedAt = new DateTime(2026, 4, 26, 18, 43, 28, 895, DateTimeKind.Utc).AddTicks(7500),
|
||||
MaterialBaseId = new Guid("10000000-0000-0000-0000-000000000002"),
|
||||
Name = "Silk",
|
||||
UpdatedAt = new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(2100)
|
||||
UpdatedAt = new DateTime(2026, 4, 26, 18, 43, 28, 895, DateTimeKind.Utc).AddTicks(7500)
|
||||
},
|
||||
new
|
||||
{
|
||||
Id = new Guid("20000000-0000-0000-0000-000000000010"),
|
||||
CreatedAt = new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(2107),
|
||||
CreatedAt = new DateTime(2026, 4, 26, 18, 43, 28, 895, DateTimeKind.Utc).AddTicks(7507),
|
||||
MaterialBaseId = new Guid("10000000-0000-0000-0000-000000000003"),
|
||||
Name = "Basic",
|
||||
UpdatedAt = new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(2107)
|
||||
UpdatedAt = new DateTime(2026, 4, 26, 18, 43, 28, 895, DateTimeKind.Utc).AddTicks(7507)
|
||||
},
|
||||
new
|
||||
{
|
||||
Id = new Guid("20000000-0000-0000-0000-000000000011"),
|
||||
CreatedAt = new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(2113),
|
||||
CreatedAt = new DateTime(2026, 4, 26, 18, 43, 28, 895, DateTimeKind.Utc).AddTicks(7513),
|
||||
MaterialBaseId = new Guid("10000000-0000-0000-0000-000000000003"),
|
||||
Name = "Matte",
|
||||
UpdatedAt = new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(2113)
|
||||
UpdatedAt = new DateTime(2026, 4, 26, 18, 43, 28, 895, DateTimeKind.Utc).AddTicks(7513)
|
||||
},
|
||||
new
|
||||
{
|
||||
Id = new Guid("20000000-0000-0000-0000-000000000012"),
|
||||
CreatedAt = new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(2120),
|
||||
CreatedAt = new DateTime(2026, 4, 26, 18, 43, 28, 895, DateTimeKind.Utc).AddTicks(7519),
|
||||
MaterialBaseId = new Guid("10000000-0000-0000-0000-000000000004"),
|
||||
Name = "Basic",
|
||||
UpdatedAt = new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(2120)
|
||||
UpdatedAt = new DateTime(2026, 4, 26, 18, 43, 28, 895, DateTimeKind.Utc).AddTicks(7520)
|
||||
},
|
||||
new
|
||||
{
|
||||
Id = new Guid("20000000-0000-0000-0000-000000000013"),
|
||||
CreatedAt = new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(2126),
|
||||
CreatedAt = new DateTime(2026, 4, 26, 18, 43, 28, 895, DateTimeKind.Utc).AddTicks(7526),
|
||||
MaterialBaseId = new Guid("10000000-0000-0000-0000-000000000004"),
|
||||
Name = "Matte",
|
||||
UpdatedAt = new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(2126)
|
||||
UpdatedAt = new DateTime(2026, 4, 26, 18, 43, 28, 895, DateTimeKind.Utc).AddTicks(7526)
|
||||
},
|
||||
new
|
||||
{
|
||||
Id = new Guid("20000000-0000-0000-0000-000000000014"),
|
||||
CreatedAt = new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(2132),
|
||||
CreatedAt = new DateTime(2026, 4, 26, 18, 43, 28, 895, DateTimeKind.Utc).AddTicks(7532),
|
||||
MaterialBaseId = new Guid("10000000-0000-0000-0000-000000000005"),
|
||||
Name = "Basic",
|
||||
UpdatedAt = new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(2133)
|
||||
UpdatedAt = new DateTime(2026, 4, 26, 18, 43, 28, 895, DateTimeKind.Utc).AddTicks(7532)
|
||||
},
|
||||
new
|
||||
{
|
||||
Id = new Guid("20000000-0000-0000-0000-000000000015"),
|
||||
CreatedAt = new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(2139),
|
||||
CreatedAt = new DateTime(2026, 4, 26, 18, 43, 28, 895, DateTimeKind.Utc).AddTicks(7538),
|
||||
MaterialBaseId = new Guid("10000000-0000-0000-0000-000000000006"),
|
||||
Name = "Basic",
|
||||
UpdatedAt = new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(2139)
|
||||
UpdatedAt = new DateTime(2026, 4, 26, 18, 43, 28, 895, DateTimeKind.Utc).AddTicks(7539)
|
||||
});
|
||||
});
|
||||
|
||||
@@ -391,90 +391,90 @@ namespace Extrudex.Infrastructure.Data.Migrations
|
||||
new
|
||||
{
|
||||
Id = new Guid("30000000-0000-0000-0000-000000000001"),
|
||||
CreatedAt = new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(2304),
|
||||
CreatedAt = new DateTime(2026, 4, 26, 18, 43, 28, 895, DateTimeKind.Utc).AddTicks(7690),
|
||||
MaterialBaseId = new Guid("10000000-0000-0000-0000-000000000001"),
|
||||
Name = "Carbon Fiber",
|
||||
UpdatedAt = new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(2304)
|
||||
UpdatedAt = new DateTime(2026, 4, 26, 18, 43, 28, 895, DateTimeKind.Utc).AddTicks(7690)
|
||||
},
|
||||
new
|
||||
{
|
||||
Id = new Guid("30000000-0000-0000-0000-000000000002"),
|
||||
CreatedAt = new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(2463),
|
||||
CreatedAt = new DateTime(2026, 4, 26, 18, 43, 28, 895, DateTimeKind.Utc).AddTicks(7838),
|
||||
MaterialBaseId = new Guid("10000000-0000-0000-0000-000000000001"),
|
||||
Name = "Glass Fiber",
|
||||
UpdatedAt = new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(2463)
|
||||
UpdatedAt = new DateTime(2026, 4, 26, 18, 43, 28, 895, DateTimeKind.Utc).AddTicks(7838)
|
||||
},
|
||||
new
|
||||
{
|
||||
Id = new Guid("30000000-0000-0000-0000-000000000003"),
|
||||
CreatedAt = new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(2471),
|
||||
CreatedAt = new DateTime(2026, 4, 26, 18, 43, 28, 895, DateTimeKind.Utc).AddTicks(7846),
|
||||
MaterialBaseId = new Guid("10000000-0000-0000-0000-000000000001"),
|
||||
Name = "Wood Fill",
|
||||
UpdatedAt = new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(2471)
|
||||
UpdatedAt = new DateTime(2026, 4, 26, 18, 43, 28, 895, DateTimeKind.Utc).AddTicks(7846)
|
||||
},
|
||||
new
|
||||
{
|
||||
Id = new Guid("30000000-0000-0000-0000-000000000004"),
|
||||
CreatedAt = new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(2477),
|
||||
CreatedAt = new DateTime(2026, 4, 26, 18, 43, 28, 895, DateTimeKind.Utc).AddTicks(7853),
|
||||
MaterialBaseId = new Guid("10000000-0000-0000-0000-000000000001"),
|
||||
Name = "Glow-in-the-Dark",
|
||||
UpdatedAt = new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(2478)
|
||||
UpdatedAt = new DateTime(2026, 4, 26, 18, 43, 28, 895, DateTimeKind.Utc).AddTicks(7853)
|
||||
},
|
||||
new
|
||||
{
|
||||
Id = new Guid("30000000-0000-0000-0000-000000000005"),
|
||||
CreatedAt = new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(2484),
|
||||
CreatedAt = new DateTime(2026, 4, 26, 18, 43, 28, 895, DateTimeKind.Utc).AddTicks(7859),
|
||||
MaterialBaseId = new Guid("10000000-0000-0000-0000-000000000002"),
|
||||
Name = "Carbon Fiber",
|
||||
UpdatedAt = new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(2484)
|
||||
UpdatedAt = new DateTime(2026, 4, 26, 18, 43, 28, 895, DateTimeKind.Utc).AddTicks(7859)
|
||||
},
|
||||
new
|
||||
{
|
||||
Id = new Guid("30000000-0000-0000-0000-000000000006"),
|
||||
CreatedAt = new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(2490),
|
||||
CreatedAt = new DateTime(2026, 4, 26, 18, 43, 28, 895, DateTimeKind.Utc).AddTicks(7865),
|
||||
MaterialBaseId = new Guid("10000000-0000-0000-0000-000000000002"),
|
||||
Name = "Glass Fiber",
|
||||
UpdatedAt = new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(2491)
|
||||
UpdatedAt = new DateTime(2026, 4, 26, 18, 43, 28, 895, DateTimeKind.Utc).AddTicks(7866)
|
||||
},
|
||||
new
|
||||
{
|
||||
Id = new Guid("30000000-0000-0000-0000-000000000007"),
|
||||
CreatedAt = new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(2497),
|
||||
CreatedAt = new DateTime(2026, 4, 26, 18, 43, 28, 895, DateTimeKind.Utc).AddTicks(7872),
|
||||
MaterialBaseId = new Guid("10000000-0000-0000-0000-000000000003"),
|
||||
Name = "Carbon Fiber",
|
||||
UpdatedAt = new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(2497)
|
||||
UpdatedAt = new DateTime(2026, 4, 26, 18, 43, 28, 895, DateTimeKind.Utc).AddTicks(7872)
|
||||
},
|
||||
new
|
||||
{
|
||||
Id = new Guid("30000000-0000-0000-0000-000000000008"),
|
||||
CreatedAt = new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(2503),
|
||||
CreatedAt = new DateTime(2026, 4, 26, 18, 43, 28, 895, DateTimeKind.Utc).AddTicks(7878),
|
||||
MaterialBaseId = new Guid("10000000-0000-0000-0000-000000000003"),
|
||||
Name = "Glass Fiber",
|
||||
UpdatedAt = new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(2503)
|
||||
UpdatedAt = new DateTime(2026, 4, 26, 18, 43, 28, 895, DateTimeKind.Utc).AddTicks(7879)
|
||||
},
|
||||
new
|
||||
{
|
||||
Id = new Guid("30000000-0000-0000-0000-000000000009"),
|
||||
CreatedAt = new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(2510),
|
||||
CreatedAt = new DateTime(2026, 4, 26, 18, 43, 28, 895, DateTimeKind.Utc).AddTicks(7885),
|
||||
MaterialBaseId = new Guid("10000000-0000-0000-0000-000000000004"),
|
||||
Name = "Carbon Fiber",
|
||||
UpdatedAt = new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(2510)
|
||||
UpdatedAt = new DateTime(2026, 4, 26, 18, 43, 28, 895, DateTimeKind.Utc).AddTicks(7885)
|
||||
},
|
||||
new
|
||||
{
|
||||
Id = new Guid("30000000-0000-0000-0000-000000000010"),
|
||||
CreatedAt = new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(2516),
|
||||
CreatedAt = new DateTime(2026, 4, 26, 18, 43, 28, 895, DateTimeKind.Utc).AddTicks(7891),
|
||||
MaterialBaseId = new Guid("10000000-0000-0000-0000-000000000006"),
|
||||
Name = "Carbon Fiber",
|
||||
UpdatedAt = new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(2516)
|
||||
UpdatedAt = new DateTime(2026, 4, 26, 18, 43, 28, 895, DateTimeKind.Utc).AddTicks(7891)
|
||||
},
|
||||
new
|
||||
{
|
||||
Id = new Guid("30000000-0000-0000-0000-000000000011"),
|
||||
CreatedAt = new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(2522),
|
||||
CreatedAt = new DateTime(2026, 4, 26, 18, 43, 28, 895, DateTimeKind.Utc).AddTicks(7898),
|
||||
MaterialBaseId = new Guid("10000000-0000-0000-0000-000000000006"),
|
||||
Name = "Glass Fiber",
|
||||
UpdatedAt = new DateTime(2026, 4, 26, 13, 14, 18, 745, DateTimeKind.Utc).AddTicks(2523)
|
||||
UpdatedAt = new DateTime(2026, 4, 26, 18, 43, 28, 895, DateTimeKind.Utc).AddTicks(7898)
|
||||
});
|
||||
});
|
||||
|
||||
@@ -806,6 +806,81 @@ namespace Extrudex.Infrastructure.Data.Migrations
|
||||
b.ToTable("spools", (string)null);
|
||||
});
|
||||
|
||||
modelBuilder.Entity("Extrudex.Domain.Entities.UsageLog", b =>
|
||||
{
|
||||
b.Property<Guid>("Id")
|
||||
.HasColumnType("uuid")
|
||||
.HasColumnName("id");
|
||||
|
||||
b.Property<DateTime>("CreatedAt")
|
||||
.ValueGeneratedOnAdd()
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("created_at")
|
||||
.HasDefaultValueSql("now() at time zone 'utc'");
|
||||
|
||||
b.Property<string>("DataSource")
|
||||
.IsRequired()
|
||||
.HasMaxLength(50)
|
||||
.HasColumnType("character varying(50)")
|
||||
.HasColumnName("data_source");
|
||||
|
||||
b.Property<decimal>("GramsUsed")
|
||||
.HasPrecision(10, 2)
|
||||
.HasColumnType("numeric(10,2)")
|
||||
.HasColumnName("grams_used");
|
||||
|
||||
b.Property<decimal?>("MmExtruded")
|
||||
.HasPrecision(12, 2)
|
||||
.HasColumnType("numeric(12,2)")
|
||||
.HasColumnName("mm_extruded");
|
||||
|
||||
b.Property<string>("Notes")
|
||||
.HasMaxLength(2000)
|
||||
.HasColumnType("character varying(2000)")
|
||||
.HasColumnName("notes");
|
||||
|
||||
b.Property<Guid?>("PrintJobId")
|
||||
.HasColumnType("uuid")
|
||||
.HasColumnName("print_job_id");
|
||||
|
||||
b.Property<Guid?>("PrinterId")
|
||||
.HasColumnType("uuid")
|
||||
.HasColumnName("printer_id");
|
||||
|
||||
b.Property<Guid>("SpoolId")
|
||||
.HasColumnType("uuid")
|
||||
.HasColumnName("spool_id");
|
||||
|
||||
b.Property<DateTime>("UpdatedAt")
|
||||
.ValueGeneratedOnAdd()
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("updated_at")
|
||||
.HasDefaultValueSql("now() at time zone 'utc'");
|
||||
|
||||
b.Property<DateTime>("UsageTimestamp")
|
||||
.HasColumnType("timestamp with time zone")
|
||||
.HasColumnName("usage_timestamp");
|
||||
|
||||
b.HasKey("Id");
|
||||
|
||||
b.HasIndex("DataSource")
|
||||
.HasDatabaseName("ix_usage_logs_data_source");
|
||||
|
||||
b.HasIndex("PrintJobId")
|
||||
.HasDatabaseName("ix_usage_logs_print_job_id");
|
||||
|
||||
b.HasIndex("PrinterId")
|
||||
.HasDatabaseName("ix_usage_logs_printer_id");
|
||||
|
||||
b.HasIndex("SpoolId")
|
||||
.HasDatabaseName("ix_usage_logs_spool_id");
|
||||
|
||||
b.HasIndex("UsageTimestamp")
|
||||
.HasDatabaseName("ix_usage_logs_usage_timestamp");
|
||||
|
||||
b.ToTable("usage_logs", (string)null);
|
||||
});
|
||||
|
||||
modelBuilder.Entity("Extrudex.Domain.Entities.AmsSlot", b =>
|
||||
{
|
||||
b.HasOne("Extrudex.Domain.Entities.AmsUnit", "AmsUnit")
|
||||
@@ -912,6 +987,34 @@ namespace Extrudex.Infrastructure.Data.Migrations
|
||||
b.Navigation("MaterialModifier");
|
||||
});
|
||||
|
||||
modelBuilder.Entity("Extrudex.Domain.Entities.UsageLog", b =>
|
||||
{
|
||||
b.HasOne("Extrudex.Domain.Entities.PrintJob", "PrintJob")
|
||||
.WithMany()
|
||||
.HasForeignKey("PrintJobId")
|
||||
.OnDelete(DeleteBehavior.SetNull)
|
||||
.HasConstraintName("fk_usage_logs_print_job");
|
||||
|
||||
b.HasOne("Extrudex.Domain.Entities.Printer", "Printer")
|
||||
.WithMany()
|
||||
.HasForeignKey("PrinterId")
|
||||
.OnDelete(DeleteBehavior.SetNull)
|
||||
.HasConstraintName("fk_usage_logs_printer");
|
||||
|
||||
b.HasOne("Extrudex.Domain.Entities.Spool", "Spool")
|
||||
.WithMany()
|
||||
.HasForeignKey("SpoolId")
|
||||
.OnDelete(DeleteBehavior.Restrict)
|
||||
.IsRequired()
|
||||
.HasConstraintName("fk_usage_logs_spool");
|
||||
|
||||
b.Navigation("PrintJob");
|
||||
|
||||
b.Navigation("Printer");
|
||||
|
||||
b.Navigation("Spool");
|
||||
});
|
||||
|
||||
modelBuilder.Entity("Extrudex.Domain.Entities.AmsUnit", b =>
|
||||
{
|
||||
b.Navigation("Slots");
|
||||
|
||||
158
backend/Infrastructure/Services/CostPerPrintService.cs
Normal file
158
backend/Infrastructure/Services/CostPerPrintService.cs
Normal file
@@ -0,0 +1,158 @@
|
||||
using Extrudex.Domain.Interfaces;
|
||||
using Extrudex.Infrastructure.Data;
|
||||
using Microsoft.EntityFrameworkCore;
|
||||
using Microsoft.Extensions.Logging;
|
||||
|
||||
namespace Extrudex.Infrastructure.Services;
|
||||
|
||||
/// <summary>
|
||||
/// Calculates the cost of goods sold (COGS) per print job using the spool's
|
||||
/// purchase price and the print job's derived grams consumed.
|
||||
///
|
||||
/// Formula:
|
||||
/// cost_per_gram = purchase_price / weight_total_grams
|
||||
/// cost_per_print = grams_derived × cost_per_gram
|
||||
///
|
||||
/// Handles missing data gracefully — if the spool has no purchase price or
|
||||
/// weight recorded, the result includes warnings and null cost fields
|
||||
/// instead of throwing exceptions.
|
||||
/// </summary>
|
||||
public class CostPerPrintService : ICostPerPrintService
|
||||
{
|
||||
private readonly ExtrudexDbContext _dbContext;
|
||||
private readonly ILogger<CostPerPrintService> _logger;
|
||||
|
||||
/// <summary>
|
||||
/// Initializes a new instance of the <see cref="CostPerPrintService"/> class.
|
||||
/// </summary>
|
||||
/// <param name="dbContext">The database context for data access.</param>
|
||||
/// <param name="logger">The logger for diagnostic output.</param>
|
||||
public CostPerPrintService(ExtrudexDbContext dbContext, ILogger<CostPerPrintService> logger)
|
||||
{
|
||||
_dbContext = dbContext;
|
||||
_logger = logger;
|
||||
}
|
||||
|
||||
/// <inheritdoc />
|
||||
public async Task<CostPerPrintResult> CalculateAsync(Guid printJobId, CancellationToken cancellationToken = default)
|
||||
{
|
||||
_logger.LogDebug("Calculating cost per print for job {PrintJobId}", printJobId);
|
||||
|
||||
var job = await _dbContext.PrintJobs
|
||||
.Include(j => j.Spool)
|
||||
.ThenInclude(s => s!.MaterialBase)
|
||||
.FirstOrDefaultAsync(j => j.Id == printJobId, cancellationToken);
|
||||
|
||||
if (job is null)
|
||||
{
|
||||
_logger.LogWarning("Print job {PrintJobId} not found for cost calculation", printJobId);
|
||||
return new CostPerPrintResult
|
||||
{
|
||||
PrintJobId = printJobId,
|
||||
Warnings = new List<string> { $"Print job with ID '{printJobId}' not found." }
|
||||
};
|
||||
}
|
||||
|
||||
return BuildResult(job);
|
||||
}
|
||||
|
||||
/// <inheritdoc />
|
||||
public async Task<IReadOnlyList<CostPerPrintResult>> CalculateBySpoolAsync(
|
||||
Guid spoolId, CancellationToken cancellationToken = default)
|
||||
{
|
||||
_logger.LogDebug("Calculating cost per print for all jobs on spool {SpoolId}", spoolId);
|
||||
|
||||
var jobs = await _dbContext.PrintJobs
|
||||
.Include(j => j.Spool)
|
||||
.ThenInclude(s => s!.MaterialBase)
|
||||
.Where(j => j.SpoolId == spoolId)
|
||||
.OrderByDescending(j => j.CreatedAt)
|
||||
.ToListAsync(cancellationToken);
|
||||
|
||||
if (jobs.Count == 0)
|
||||
{
|
||||
_logger.LogDebug("No print jobs found for spool {SpoolId}", spoolId);
|
||||
return Array.Empty<CostPerPrintResult>();
|
||||
}
|
||||
|
||||
return jobs.Select(BuildResult).ToList();
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Builds a <see cref="CostPerPrintResult"/> from a print job entity.
|
||||
/// Computes cost_per_gram and cost_per_print when all required data is available.
|
||||
/// Populates warnings when data is missing or incomplete.
|
||||
/// </summary>
|
||||
/// <param name="job">The print job entity with Spool navigation loaded.</param>
|
||||
/// <returns>A cost calculation result with breakdown and any warnings.</returns>
|
||||
private CostPerPrintResult BuildResult(Domain.Entities.PrintJob job)
|
||||
{
|
||||
var warnings = new List<string>();
|
||||
var spool = job.Spool;
|
||||
|
||||
// Map what we always have
|
||||
var result = new CostPerPrintResult
|
||||
{
|
||||
PrintJobId = job.Id,
|
||||
PrintName = job.PrintName,
|
||||
SpoolId = job.SpoolId,
|
||||
SpoolSerial = spool?.SpoolSerial ?? string.Empty,
|
||||
MmExtruded = job.MmExtruded,
|
||||
GramsDerived = job.GramsDerived,
|
||||
};
|
||||
|
||||
// Guard: spool must be loaded
|
||||
if (spool is null)
|
||||
{
|
||||
warnings.Add("Spool data is not available for this print job.");
|
||||
result.Warnings = warnings;
|
||||
return result;
|
||||
}
|
||||
|
||||
// Capture purchase price
|
||||
result.PurchasePrice = spool.PurchasePrice;
|
||||
result.WeightTotalGrams = spool.WeightTotalGrams;
|
||||
|
||||
// Check for missing purchase price
|
||||
if (!spool.PurchasePrice.HasValue)
|
||||
{
|
||||
warnings.Add(
|
||||
"Spool purchase price is not recorded. Cost calculation requires a purchase price on the spool.");
|
||||
}
|
||||
|
||||
// Check for zero or negative weight — prevents division by zero
|
||||
if (spool.WeightTotalGrams <= 0)
|
||||
{
|
||||
warnings.Add(
|
||||
"Spool total weight is zero or not recorded. Cost calculation requires a positive weight_total_grams on the spool.");
|
||||
}
|
||||
|
||||
// Check for zero grams derived
|
||||
if (job.GramsDerived <= 0)
|
||||
{
|
||||
warnings.Add(
|
||||
"Derived grams consumed is zero. Ensure mm_extruded, filament diameter, and material density are recorded for this print job.");
|
||||
}
|
||||
|
||||
// If all data is present and valid, compute the cost
|
||||
if (spool.PurchasePrice.HasValue && spool.WeightTotalGrams > 0 && job.GramsDerived > 0)
|
||||
{
|
||||
var costPerGram = spool.PurchasePrice.Value / spool.WeightTotalGrams;
|
||||
result.CostPerGram = Math.Round(costPerGram, 6);
|
||||
result.CostPerPrint = Math.Round(job.GramsDerived * costPerGram, 4);
|
||||
|
||||
_logger.LogDebug(
|
||||
"Cost calculated for job {PrintJobId}: {GramsDerived}g × {CostPerGram:C}/g = {CostPerPrint:C}",
|
||||
job.Id, job.GramsDerived, result.CostPerGram, result.CostPerPrint);
|
||||
}
|
||||
else
|
||||
{
|
||||
_logger.LogDebug(
|
||||
"Cost calculation incomplete for job {PrintJobId}: missing data (warnings: {WarningCount})",
|
||||
job.Id, warnings.Count);
|
||||
}
|
||||
|
||||
result.Warnings = warnings;
|
||||
return result;
|
||||
}
|
||||
}
|
||||
79
backend/Infrastructure/Services/FilamentUsageService.cs
Normal file
79
backend/Infrastructure/Services/FilamentUsageService.cs
Normal file
@@ -0,0 +1,79 @@
|
||||
using Extrudex.Domain.Entities;
|
||||
using Extrudex.Domain.Interfaces;
|
||||
using Extrudex.Infrastructure.Data;
|
||||
using Microsoft.EntityFrameworkCore;
|
||||
using Microsoft.Extensions.Logging;
|
||||
|
||||
namespace Extrudex.Infrastructure.Services;
|
||||
|
||||
/// <summary>
|
||||
/// EF Core–backed implementation of the filament usage service.
|
||||
/// Persists usage records to the database and provides query methods
|
||||
/// for retrieving usage by print job or spool.
|
||||
/// </summary>
|
||||
public class FilamentUsageService : IFilamentUsageService
|
||||
{
|
||||
private readonly ExtrudexDbContext _dbContext;
|
||||
private readonly ILogger<FilamentUsageService> _logger;
|
||||
|
||||
public FilamentUsageService(
|
||||
ExtrudexDbContext dbContext,
|
||||
ILogger<FilamentUsageService> logger)
|
||||
{
|
||||
_dbContext = dbContext;
|
||||
_logger = logger;
|
||||
}
|
||||
|
||||
/// <inheritdoc />
|
||||
public async Task<FilamentUsage> RecordUsageAsync(
|
||||
Guid printJobId,
|
||||
Guid spoolId,
|
||||
Guid printerId,
|
||||
decimal gramsUsed,
|
||||
decimal mmExtruded,
|
||||
string? notes = null,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
var usage = new FilamentUsage
|
||||
{
|
||||
PrintJobId = printJobId,
|
||||
SpoolId = spoolId,
|
||||
PrinterId = printerId,
|
||||
GramsUsed = gramsUsed,
|
||||
MmExtruded = mmExtruded,
|
||||
RecordedAt = DateTime.UtcNow,
|
||||
Notes = notes
|
||||
};
|
||||
|
||||
_dbContext.FilamentUsages.Add(usage);
|
||||
await _dbContext.SaveChangesAsync(cancellationToken);
|
||||
|
||||
_logger.LogInformation(
|
||||
"Recorded filament usage: {Grams}g / {Mm}mm for print job {JobId} on spool {SpoolId}",
|
||||
gramsUsed, mmExtruded, printJobId, spoolId);
|
||||
|
||||
return usage;
|
||||
}
|
||||
|
||||
/// <inheritdoc />
|
||||
public async Task<IReadOnlyList<FilamentUsage>> GetByPrintJobAsync(
|
||||
Guid printJobId,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
return await _dbContext.FilamentUsages
|
||||
.Where(u => u.PrintJobId == printJobId)
|
||||
.OrderByDescending(u => u.RecordedAt)
|
||||
.ToListAsync(cancellationToken);
|
||||
}
|
||||
|
||||
/// <inheritdoc />
|
||||
public async Task<IReadOnlyList<FilamentUsage>> GetBySpoolAsync(
|
||||
Guid spoolId,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
return await _dbContext.FilamentUsages
|
||||
.Where(u => u.SpoolId == spoolId)
|
||||
.OrderByDescending(u => u.RecordedAt)
|
||||
.ToListAsync(cancellationToken);
|
||||
}
|
||||
}
|
||||
139
backend/Infrastructure/Services/FilamentUsageSyncService.cs
Normal file
139
backend/Infrastructure/Services/FilamentUsageSyncService.cs
Normal file
@@ -0,0 +1,139 @@
|
||||
using Extrudex.Domain.Enums;
|
||||
using Extrudex.Domain.Interfaces;
|
||||
using Extrudex.Infrastructure.Data;
|
||||
using Microsoft.EntityFrameworkCore;
|
||||
using Microsoft.Extensions.Logging;
|
||||
|
||||
namespace Extrudex.Infrastructure.Configuration;
|
||||
|
||||
/// <summary>
|
||||
/// Service that syncs filament usage data from Moonraker printers into the
|
||||
/// Extrudex database. Queries all active Moonraker printers, fetches their
|
||||
/// current filament usage metrics, and updates spool remaining weights and
|
||||
/// print job records.
|
||||
/// </summary>
|
||||
public class FilamentUsageSyncService : IFilamentUsageSyncService
|
||||
{
|
||||
private readonly ExtrudexDbContext _dbContext;
|
||||
private readonly IMoonrakerClient _moonrakerClient;
|
||||
private readonly ILogger<FilamentUsageSyncService> _logger;
|
||||
|
||||
/// <summary>
|
||||
/// Creates a new FilamentUsageSyncService.
|
||||
/// </summary>
|
||||
/// <param name="dbContext">The EF Core database context for persisting updates.</param>
|
||||
/// <param name="moonrakerClient">The Moonraker HTTP client for fetching printer data.</param>
|
||||
/// <param name="logger">Logger for diagnostic output.</param>
|
||||
public FilamentUsageSyncService(
|
||||
ExtrudexDbContext dbContext,
|
||||
IMoonrakerClient moonrakerClient,
|
||||
ILogger<FilamentUsageSyncService> logger)
|
||||
{
|
||||
_dbContext = dbContext;
|
||||
_moonrakerClient = moonrakerClient;
|
||||
_logger = logger;
|
||||
}
|
||||
|
||||
/// <inheritdoc />
|
||||
public async Task<int> SyncAllAsync(CancellationToken cancellationToken = default)
|
||||
{
|
||||
_logger.LogInformation("Starting filament usage sync cycle");
|
||||
|
||||
var printers = await _dbContext.Printers
|
||||
.Where(p => p.IsActive && p.ConnectionType == ConnectionType.Moonraker)
|
||||
.Include(p => p.AmsUnits)
|
||||
.ThenInclude(u => u.Slots)
|
||||
.ThenInclude(s => s.Spool)
|
||||
.ToListAsync(cancellationToken);
|
||||
|
||||
if (printers.Count == 0)
|
||||
{
|
||||
_logger.LogInformation("No active Moonraker printers found — skipping sync");
|
||||
return 0;
|
||||
}
|
||||
|
||||
_logger.LogInformation("Found {PrinterCount} active Moonraker printer(s) to sync", printers.Count);
|
||||
|
||||
var syncedCount = 0;
|
||||
|
||||
foreach (var printer in printers)
|
||||
{
|
||||
try
|
||||
{
|
||||
var usageData = await _moonrakerClient.GetFilamentUsageAsync(
|
||||
printer.HostnameOrIp,
|
||||
printer.Port,
|
||||
printer.ApiKey,
|
||||
cancellationToken);
|
||||
|
||||
if (usageData.Count == 0)
|
||||
{
|
||||
_logger.LogWarning(
|
||||
"No usage data returned from printer {PrinterName} ({Host}:{Port})",
|
||||
printer.Name, printer.HostnameOrIp, printer.Port);
|
||||
continue;
|
||||
}
|
||||
|
||||
// Update spool remaining weights from AMS data
|
||||
UpdateSpoolWeights(printer, usageData);
|
||||
|
||||
// Mark printer as seen and idle (reachable = idle, not printing)
|
||||
printer.LastSeenAt = DateTime.UtcNow;
|
||||
printer.Status = PrinterStatus.Idle;
|
||||
|
||||
syncedCount++;
|
||||
_logger.LogInformation(
|
||||
"Successfully synced filament usage from printer {PrinterName}",
|
||||
printer.Name);
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
_logger.LogError(ex,
|
||||
"Error syncing filament usage from printer {PrinterName} ({Host}:{Port})",
|
||||
printer.Name, printer.HostnameOrIp, printer.Port);
|
||||
}
|
||||
}
|
||||
|
||||
await _dbContext.SaveChangesAsync(cancellationToken);
|
||||
|
||||
_logger.LogInformation(
|
||||
"Filament usage sync cycle complete — {SyncedCount}/{TotalCount} printers synced",
|
||||
syncedCount, printers.Count);
|
||||
|
||||
return syncedCount;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Updates spool remaining weights based on usage data received from Moonraker.
|
||||
/// For printers with AMS units, updates the remaining weight on each slot's spool.
|
||||
/// </summary>
|
||||
private void UpdateSpoolWeights(
|
||||
Domain.Entities.Printer printer,
|
||||
Dictionary<string, decimal> usageData)
|
||||
{
|
||||
// Update AMS slot remaining weights if available
|
||||
foreach (var amsUnit in printer.AmsUnits)
|
||||
{
|
||||
foreach (var slot in amsUnit.Slots)
|
||||
{
|
||||
if (slot.Spool != null && slot.RemainingWeightG.HasValue)
|
||||
{
|
||||
// Sync the AMS-reported remaining weight to the spool
|
||||
slot.Spool.WeightRemainingGrams = slot.RemainingWeightG.Value;
|
||||
|
||||
_logger.LogDebug(
|
||||
"Updated spool {SpoolSerial} remaining weight to {Weight}g",
|
||||
slot.Spool.SpoolSerial, slot.RemainingWeightG.Value);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// If usage data contains extruded mm, log it for observability
|
||||
if (usageData.TryGetValue("mm_extruded", out var mmExtruded) && mmExtruded > 0)
|
||||
{
|
||||
_logger.LogInformation(
|
||||
"Printer {PrinterName} reports {MmExtruded}mm filament extruded in latest job",
|
||||
printer.Name, mmExtruded);
|
||||
}
|
||||
}
|
||||
}
|
||||
95
backend/Infrastructure/Services/LowStockDetector.cs
Normal file
95
backend/Infrastructure/Services/LowStockDetector.cs
Normal file
@@ -0,0 +1,95 @@
|
||||
using Extrudex.Domain.Interfaces;
|
||||
using Microsoft.Extensions.Configuration;
|
||||
using Microsoft.Extensions.Logging;
|
||||
|
||||
namespace Extrudex.Infrastructure.Services;
|
||||
|
||||
/// <summary>
|
||||
/// Detects low-stock filament spools by comparing the remaining weight percentage
|
||||
/// against a configurable threshold. The threshold can be set via:
|
||||
/// 1. EXTRUDEX_LOW_STOCK_THRESHOLD env var (highest priority, e.g. "25")
|
||||
/// 2. FilamentAlerts:LowStockThresholdPercent in appsettings.json
|
||||
/// 3. Default: 20% (a standard spool is "low" when ≤20% remains)
|
||||
/// </summary>
|
||||
public class LowStockDetector : ILowStockDetector
|
||||
{
|
||||
private readonly ILogger<LowStockDetector> _logger;
|
||||
|
||||
/// <summary>
|
||||
/// The percentage threshold below which a spool is considered low stock.
|
||||
/// For example, 20 means a spool is "low" when ≤20% of its filament remains.
|
||||
/// </summary>
|
||||
public decimal LowStockThresholdPercent { get; }
|
||||
|
||||
/// <summary>
|
||||
/// Initializes a new instance of the <see cref="LowStockDetector"/> class.
|
||||
/// Reads the low-stock threshold from configuration with env var override support.
|
||||
/// </summary>
|
||||
/// <param name="configuration">Application configuration for threshold settings.</param>
|
||||
/// <param name="logger">Logger for diagnostic output.</param>
|
||||
public LowStockDetector(IConfiguration configuration, ILogger<LowStockDetector> logger)
|
||||
{
|
||||
_logger = logger;
|
||||
|
||||
// Priority: env var > appsettings > default (20%)
|
||||
var envThreshold = Environment.GetEnvironmentVariable("EXTRUDEX_LOW_STOCK_THRESHOLD");
|
||||
var configThreshold = configuration.GetValue<decimal?>("FilamentAlerts:LowStockThresholdPercent");
|
||||
|
||||
if (!string.IsNullOrEmpty(envThreshold) && decimal.TryParse(envThreshold, out var parsedEnv))
|
||||
{
|
||||
LowStockThresholdPercent = Math.Clamp(parsedEnv, 0m, 100m);
|
||||
_logger.LogInformation(
|
||||
"Low-stock threshold set from env var EXTRUDEX_LOW_STOCK_THRESHOLD: {Threshold}%",
|
||||
LowStockThresholdPercent);
|
||||
}
|
||||
else if (configThreshold.HasValue)
|
||||
{
|
||||
LowStockThresholdPercent = Math.Clamp(configThreshold.Value, 0m, 100m);
|
||||
_logger.LogInformation(
|
||||
"Low-stock threshold set from config FilamentAlerts:LowStockThresholdPercent: {Threshold}%",
|
||||
LowStockThresholdPercent);
|
||||
}
|
||||
else
|
||||
{
|
||||
LowStockThresholdPercent = 20m;
|
||||
_logger.LogInformation(
|
||||
"Low-stock threshold using default: {Threshold}%", LowStockThresholdPercent);
|
||||
}
|
||||
}
|
||||
|
||||
/// <inheritdoc />
|
||||
public bool IsLowStock(decimal weightRemainingGrams, decimal weightTotalGrams)
|
||||
{
|
||||
if (weightTotalGrams <= 0m)
|
||||
{
|
||||
_logger.LogDebug(
|
||||
"Spool with total weight {Total}g cannot be evaluated for low stock — treating as not low",
|
||||
weightTotalGrams);
|
||||
return false;
|
||||
}
|
||||
|
||||
var remainingPercent = GetRemainingWeightPercent(weightRemainingGrams, weightTotalGrams);
|
||||
var isLow = remainingPercent <= LowStockThresholdPercent;
|
||||
|
||||
if (isLow)
|
||||
{
|
||||
_logger.LogDebug(
|
||||
"Spool is LOW STOCK: {Remaining}g / {Total}g = {Percent:F1}% (threshold: {Threshold}%)",
|
||||
weightRemainingGrams, weightTotalGrams, remainingPercent, LowStockThresholdPercent);
|
||||
}
|
||||
|
||||
return isLow;
|
||||
}
|
||||
|
||||
/// <inheritdoc />
|
||||
public decimal GetRemainingWeightPercent(decimal weightRemainingGrams, decimal weightTotalGrams)
|
||||
{
|
||||
if (weightTotalGrams <= 0m)
|
||||
return 0m;
|
||||
|
||||
return Math.Round(
|
||||
(weightRemainingGrams / weightTotalGrams) * 100m,
|
||||
1,
|
||||
MidpointRounding.AwayFromZero);
|
||||
}
|
||||
}
|
||||
447
backend/Infrastructure/Services/MoonrakerClient.cs
Normal file
447
backend/Infrastructure/Services/MoonrakerClient.cs
Normal file
@@ -0,0 +1,447 @@
|
||||
using System.Net.Http.Json;
|
||||
using System.Text.Json;
|
||||
using Extrudex.Domain.DTOs.Moonraker;
|
||||
using Extrudex.Domain.Interfaces;
|
||||
using Microsoft.Extensions.Logging;
|
||||
|
||||
namespace Extrudex.Infrastructure.Services;
|
||||
|
||||
/// <summary>
|
||||
/// HTTP client for communicating with Moonraker REST API endpoints
|
||||
/// on Klipper-based printers (e.g., Elegoo Centauri Carbon).
|
||||
/// Provides strongly-typed methods for server discovery, printer status,
|
||||
/// print job history, and real-time telemetry.
|
||||
/// </summary>
|
||||
public class MoonrakerClient : IMoonrakerClient
|
||||
{
|
||||
private readonly HttpClient _httpClient;
|
||||
private readonly ILogger<MoonrakerClient> _logger;
|
||||
|
||||
/// <summary>
|
||||
/// Creates a new MoonrakerClient with the configured HTTP client and logger.
|
||||
/// </summary>
|
||||
/// <param name="httpClient">The HTTP client for making requests to Moonraker endpoints.</param>
|
||||
/// <param name="logger">Logger for diagnostic output.</param>
|
||||
public MoonrakerClient(HttpClient httpClient, ILogger<MoonrakerClient> logger)
|
||||
{
|
||||
_httpClient = httpClient;
|
||||
_logger = logger;
|
||||
}
|
||||
|
||||
/// <inheritdoc />
|
||||
public async Task<MoonrakerServerInfo?> GetServerInfoAsync(
|
||||
string hostnameOrIp,
|
||||
int port,
|
||||
string? apiKey,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
var baseUrl = BuildBaseUrl(hostnameOrIp, port);
|
||||
|
||||
try
|
||||
{
|
||||
using var request = CreateRequest(HttpMethod.Get, $"{baseUrl}/server/info", apiKey);
|
||||
using var response = await _httpClient.SendAsync(request, cancellationToken);
|
||||
response.EnsureSuccessStatusCode();
|
||||
|
||||
var json = await response.Content.ReadFromJsonAsync<JsonElement>(cancellationToken: cancellationToken);
|
||||
|
||||
var serverInfo = new MoonrakerServerInfo();
|
||||
|
||||
if (json.TryGetProperty("result", out var result))
|
||||
{
|
||||
if (result.TryGetProperty("hostname", out var hostname))
|
||||
serverInfo.Hostname = hostname.GetString() ?? string.Empty;
|
||||
if (result.TryGetProperty("software_version", out var version))
|
||||
serverInfo.SoftwareVersion = version.GetString() ?? string.Empty;
|
||||
if (result.TryGetProperty("cpu_info", out var cpuInfo))
|
||||
serverInfo.CpuInfo = cpuInfo.GetString() ?? string.Empty;
|
||||
if (result.TryGetProperty("klippy_connected", out var klippyConnected))
|
||||
serverInfo.KlippyConnected = klippyConnected.GetBoolean();
|
||||
if (result.TryGetProperty("klippy_state", out var klippyState))
|
||||
serverInfo.KlippyState = klippyState.GetString() ?? string.Empty;
|
||||
if (result.TryGetProperty("api_key_required", out var apiKeyRequired))
|
||||
serverInfo.ApiKeyRequired = apiKeyRequired.GetBoolean();
|
||||
if (result.TryGetProperty("plugins", out var plugins))
|
||||
serverInfo.Plugins = plugins.EnumerateArray()
|
||||
.Select(p => p.GetString() ?? string.Empty)
|
||||
.Where(s => !string.IsNullOrEmpty(s))
|
||||
.ToList();
|
||||
}
|
||||
|
||||
_logger.LogDebug(
|
||||
"Retrieved server info from Moonraker at {Host}:{Port} — version {Version}, klippy {State}",
|
||||
hostnameOrIp, port, serverInfo.SoftwareVersion, serverInfo.KlippyState);
|
||||
|
||||
return serverInfo;
|
||||
}
|
||||
catch (HttpRequestException ex)
|
||||
{
|
||||
_logger.LogWarning(ex,
|
||||
"Failed to retrieve server info from Moonraker at {Host}:{Port}",
|
||||
hostnameOrIp, port);
|
||||
return null;
|
||||
}
|
||||
catch (JsonException ex)
|
||||
{
|
||||
_logger.LogWarning(ex,
|
||||
"Failed to parse Moonraker server info response from {Host}:{Port}",
|
||||
hostnameOrIp, port);
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
/// <inheritdoc />
|
||||
public async Task<bool> IsReachableAsync(
|
||||
string hostnameOrIp,
|
||||
int port,
|
||||
string? apiKey,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
var serverInfo = await GetServerInfoAsync(hostnameOrIp, port, apiKey, cancellationToken);
|
||||
return serverInfo is not null;
|
||||
}
|
||||
|
||||
/// <inheritdoc />
|
||||
public async Task<MoonrakerPrinterInfo?> GetPrinterInfoAsync(
|
||||
string hostnameOrIp,
|
||||
int port,
|
||||
string? apiKey,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
var baseUrl = BuildBaseUrl(hostnameOrIp, port);
|
||||
|
||||
try
|
||||
{
|
||||
using var request = CreateRequest(HttpMethod.Get, $"{baseUrl}/printer/info", apiKey);
|
||||
using var response = await _httpClient.SendAsync(request, cancellationToken);
|
||||
response.EnsureSuccessStatusCode();
|
||||
|
||||
var json = await response.Content.ReadFromJsonAsync<JsonElement>(cancellationToken: cancellationToken);
|
||||
|
||||
var printerInfo = new MoonrakerPrinterInfo();
|
||||
|
||||
if (json.TryGetProperty("result", out var result))
|
||||
{
|
||||
if (result.TryGetProperty("state", out var state))
|
||||
printerInfo.State = state.GetString() ?? string.Empty;
|
||||
if (result.TryGetProperty("state_message", out var stateMessage))
|
||||
printerInfo.StateMessage = stateMessage.GetString() ?? string.Empty;
|
||||
if (result.TryGetProperty("klippy_ready", out var klippyReady))
|
||||
printerInfo.KlippyReady = klippyReady.GetBoolean();
|
||||
}
|
||||
|
||||
_logger.LogDebug(
|
||||
"Retrieved printer info from Moonraker at {Host}:{Port} — state: {State}",
|
||||
hostnameOrIp, port, printerInfo.State);
|
||||
|
||||
return printerInfo;
|
||||
}
|
||||
catch (HttpRequestException ex)
|
||||
{
|
||||
_logger.LogWarning(ex,
|
||||
"Failed to retrieve printer info from Moonraker at {Host}:{Port}",
|
||||
hostnameOrIp, port);
|
||||
return null;
|
||||
}
|
||||
catch (JsonException ex)
|
||||
{
|
||||
_logger.LogWarning(ex,
|
||||
"Failed to parse Moonraker printer info response from {Host}:{Port}",
|
||||
hostnameOrIp, port);
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
/// <inheritdoc />
|
||||
public async Task<MoonrakerHistoryResponse> GetPrintHistoryAsync(
|
||||
string hostnameOrIp,
|
||||
int port,
|
||||
string? apiKey,
|
||||
int limit = 50,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
var baseUrl = BuildBaseUrl(hostnameOrIp, port);
|
||||
var historyResponse = new MoonrakerHistoryResponse();
|
||||
|
||||
try
|
||||
{
|
||||
using var request = CreateRequest(
|
||||
HttpMethod.Get,
|
||||
$"{baseUrl}/server/history/items?limit={limit}",
|
||||
apiKey);
|
||||
|
||||
using var response = await _httpClient.SendAsync(request, cancellationToken);
|
||||
response.EnsureSuccessStatusCode();
|
||||
|
||||
var json = await response.Content.ReadFromJsonAsync<JsonElement>(cancellationToken: cancellationToken);
|
||||
|
||||
if (json.TryGetProperty("result", out var result))
|
||||
{
|
||||
if (result.TryGetProperty("count", out var count))
|
||||
historyResponse.TotalCount = count.GetInt32();
|
||||
|
||||
if (result.TryGetProperty("items", out var items))
|
||||
{
|
||||
foreach (var item in items.EnumerateArray())
|
||||
{
|
||||
var job = MapPrintJob(item);
|
||||
historyResponse.Items.Add(job);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
_logger.LogDebug(
|
||||
"Retrieved {JobCount} print history items from Moonraker at {Host}:{Port}",
|
||||
historyResponse.Items.Count, hostnameOrIp, port);
|
||||
}
|
||||
catch (HttpRequestException ex)
|
||||
{
|
||||
_logger.LogWarning(ex,
|
||||
"Failed to retrieve print history from Moonraker at {Host}:{Port}",
|
||||
hostnameOrIp, port);
|
||||
}
|
||||
catch (JsonException ex)
|
||||
{
|
||||
_logger.LogWarning(ex,
|
||||
"Failed to parse Moonraker history response from {Host}:{Port}",
|
||||
hostnameOrIp, port);
|
||||
}
|
||||
|
||||
return historyResponse;
|
||||
}
|
||||
|
||||
/// <inheritdoc />
|
||||
public async Task<MoonrakerPrintStats?> GetPrintStatsAsync(
|
||||
string hostnameOrIp,
|
||||
int port,
|
||||
string? apiKey,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
var baseUrl = BuildBaseUrl(hostnameOrIp, port);
|
||||
|
||||
try
|
||||
{
|
||||
using var request = CreateRequest(
|
||||
HttpMethod.Get,
|
||||
$"{baseUrl}/printer/objects/query?print_stats",
|
||||
apiKey);
|
||||
|
||||
using var response = await _httpClient.SendAsync(request, cancellationToken);
|
||||
response.EnsureSuccessStatusCode();
|
||||
|
||||
var json = await response.Content.ReadFromJsonAsync<JsonElement>(cancellationToken: cancellationToken);
|
||||
|
||||
if (json.TryGetProperty("result", out var result)
|
||||
&& result.TryGetProperty("status", out var status)
|
||||
&& status.TryGetProperty("print_stats", out var printStats))
|
||||
{
|
||||
var stats = MapPrintStats(printStats);
|
||||
|
||||
_logger.LogDebug(
|
||||
"Retrieved print stats from Moonraker at {Host}:{Port} — state: {State}, filament: {FilamentMm}mm",
|
||||
hostnameOrIp, port, stats.State, stats.FilamentUsedMm);
|
||||
|
||||
return stats;
|
||||
}
|
||||
|
||||
_logger.LogWarning(
|
||||
"Moonraker print_stats not found in response from {Host}:{Port}",
|
||||
hostnameOrIp, port);
|
||||
return null;
|
||||
}
|
||||
catch (HttpRequestException ex)
|
||||
{
|
||||
_logger.LogWarning(ex,
|
||||
"Failed to retrieve print stats from Moonraker at {Host}:{Port}",
|
||||
hostnameOrIp, port);
|
||||
return null;
|
||||
}
|
||||
catch (JsonException ex)
|
||||
{
|
||||
_logger.LogWarning(ex,
|
||||
"Failed to parse Moonraker print stats response from {Host}:{Port}",
|
||||
hostnameOrIp, port);
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
/// <inheritdoc />
public async Task<MoonrakerDisplayStatus?> GetDisplayStatusAsync(
    string hostnameOrIp,
    int port,
    string? apiKey,
    CancellationToken cancellationToken = default)
{
    var baseUrl = BuildBaseUrl(hostnameOrIp, port);

    try
    {
        // Moonraker serves display_status (progress + message) through the objects query endpoint.
        using var request = CreateRequest(
            HttpMethod.Get,
            $"{baseUrl}/printer/objects/query?display_status",
            apiKey);

        using var response = await _httpClient.SendAsync(request, cancellationToken);
        response.EnsureSuccessStatusCode();

        var json = await response.Content.ReadFromJsonAsync<JsonElement>(cancellationToken: cancellationToken);

        // Expected shape: { "result": { "status": { "display_status": { ... } } } }.
        // Guard-clause style: bail out early when the payload doesn't match.
        if (!json.TryGetProperty("result", out var result)
            || !result.TryGetProperty("status", out var status)
            || !status.TryGetProperty("display_status", out var displayStatus))
        {
            _logger.LogWarning(
                "Moonraker display_status not found in response from {Host}:{Port}",
                hostnameOrIp, port);
            return null;
        }

        var ds = new MoonrakerDisplayStatus();

        if (displayStatus.TryGetProperty("progress", out var progress))
        {
            ds.Progress = progress.GetDecimal();
        }

        if (displayStatus.TryGetProperty("message", out var message))
        {
            ds.Message = message.GetString() ?? string.Empty;
        }

        _logger.LogDebug(
            "Retrieved display status from Moonraker at {Host}:{Port} — progress: {Progress:P0}",
            hostnameOrIp, port, ds.Progress);

        return ds;
    }
    catch (HttpRequestException ex)
    {
        // Network failures / non-2xx responses are expected when a printer is offline — degrade to null.
        _logger.LogWarning(ex,
            "Failed to retrieve display status from Moonraker at {Host}:{Port}",
            hostnameOrIp, port);
        return null;
    }
    catch (JsonException ex)
    {
        _logger.LogWarning(ex,
            "Failed to parse Moonraker display status response from {Host}:{Port}",
            hostnameOrIp, port);
        return null;
    }
}
|
||||
|
||||
/// <inheritdoc />
public async Task<Dictionary<string, decimal>> GetFilamentUsageAsync(
    string hostnameOrIp,
    int port,
    string? apiKey,
    CancellationToken cancellationToken = default)
{
    // Reuse the typed history endpoint and flatten the most recent job into metric pairs.
    var history = await GetPrintHistoryAsync(hostnameOrIp, port, apiKey, limit: 1, cancellationToken);

    var metrics = new Dictionary<string, decimal>();

    if (history.Items.Count > 0)
    {
        var newest = history.Items[0];
        metrics["mm_extruded"] = newest.FilamentUsedMm;
        metrics["print_duration_seconds"] = newest.PrintDurationSeconds;
    }

    _logger.LogDebug(
        "Retrieved filament usage from Moonraker at {Host}:{Port}: {MetricCount} metrics",
        hostnameOrIp, port, metrics.Count);

    return metrics;
}
|
||||
|
||||
/// <summary>
/// Builds the base URL for Moonraker API calls from hostname and port.
/// The Moonraker API is addressed over plain HTTP.
/// </summary>
private static string BuildBaseUrl(string hostnameOrIp, int port) =>
    $"http://{hostnameOrIp}:{port}";
|
||||
|
||||
/// <summary>
/// Creates an HttpRequestMessage for the given method and URL, attaching the
/// Moonraker "X-Api-Key" header when an API key is supplied.
/// A null or empty key means the target instance is unauthenticated, so no header is added.
/// </summary>
private static HttpRequestMessage CreateRequest(HttpMethod method, string url, string? apiKey)
{
    var message = new HttpRequestMessage(method, url);

    if (!string.IsNullOrEmpty(apiKey))
        message.Headers.Add("X-Api-Key", apiKey);

    return message;
}
|
||||
|
||||
/// <summary>
/// Maps a JSON element representing a Moonraker print job history item
/// to a <see cref="MoonrakerPrintJob"/> DTO. Properties absent from the
/// JSON leave the DTO's defaults untouched.
/// </summary>
private static MoonrakerPrintJob MapPrintJob(JsonElement item)
{
    var job = new MoonrakerPrintJob();

    // Scalar string fields — fall back to empty string for JSON null.
    if (item.TryGetProperty("job_id", out var jobIdProp))
        job.JobId = jobIdProp.GetString() ?? string.Empty;
    if (item.TryGetProperty("filename", out var filenameProp))
        job.Filename = filenameProp.GetString() ?? string.Empty;
    if (item.TryGetProperty("status", out var statusProp))
        job.Status = statusProp.GetString() ?? string.Empty;

    // Numeric usage/duration fields.
    if (item.TryGetProperty("filament_used", out var filamentProp))
        job.FilamentUsedMm = filamentProp.GetDecimal();
    if (item.TryGetProperty("print_duration", out var printDurProp))
        job.PrintDurationSeconds = printDurProp.GetDecimal();
    if (item.TryGetProperty("total_duration", out var totalDurProp))
        job.TotalDurationSeconds = totalDurProp.GetDecimal();

    // Unix-epoch timestamps, converted to UTC DateTime.
    // NOTE(review): TryGetInt64 fails for fractional JSON numbers; if Moonraker
    // reports start_time/end_time as floats (durations above use GetDecimal),
    // these timestamps would silently stay unset — verify against a live response.
    if (item.TryGetProperty("start_time", out var startProp) && startProp.ValueKind != JsonValueKind.Null)
    {
        if (startProp.TryGetInt64(out var startSeconds))
            job.StartTime = DateTimeOffset.FromUnixTimeSeconds(startSeconds).UtcDateTime;
    }

    if (item.TryGetProperty("end_time", out var endProp) && endProp.ValueKind != JsonValueKind.Null)
    {
        if (endProp.TryGetInt64(out var endSeconds))
            job.EndTime = DateTimeOffset.FromUnixTimeSeconds(endSeconds).UtcDateTime;
    }

    // Copy metadata key/value pairs, coercing JSON kinds to CLR primitives;
    // anything non-scalar falls back to its raw JSON text.
    if (item.TryGetProperty("metadata", out var metadataProp) && metadataProp.ValueKind == JsonValueKind.Object)
    {
        foreach (var entry in metadataProp.EnumerateObject())
        {
            job.Metadata[entry.Name] = entry.Value.ValueKind switch
            {
                JsonValueKind.String => (object)(entry.Value.GetString() ?? string.Empty),
                JsonValueKind.Number => entry.Value.GetDecimal(),
                JsonValueKind.True => true,
                JsonValueKind.False => false,
                _ => entry.Value.ToString() ?? string.Empty
            };
        }
    }

    return job;
}
|
||||
|
||||
/// <summary>
/// Maps a JSON element representing Moonraker print_stats
/// to a <see cref="MoonrakerPrintStats"/> DTO. Absent properties leave
/// the DTO's defaults untouched.
/// </summary>
private static MoonrakerPrintStats MapPrintStats(JsonElement printStats)
{
    var dto = new MoonrakerPrintStats();

    if (printStats.TryGetProperty("state", out var stateProp))
        dto.State = stateProp.GetString() ?? string.Empty;

    if (printStats.TryGetProperty("filament_used", out var filamentProp))
        dto.FilamentUsedMm = filamentProp.GetDecimal();

    if (printStats.TryGetProperty("print_duration", out var durationProp))
        dto.PrintDurationSeconds = durationProp.GetDecimal();

    // filename/message may be present but JSON null (e.g. when idle) — keep them null then.
    if (printStats.TryGetProperty("filename", out var filenameProp) && filenameProp.ValueKind != JsonValueKind.Null)
        dto.Filename = filenameProp.GetString();

    if (printStats.TryGetProperty("message", out var messageProp) && messageProp.ValueKind != JsonValueKind.Null)
        dto.Message = messageProp.GetString();

    return dto;
}
|
||||
}
|
||||
320
backend/Infrastructure/Services/MoonrakerPrinterSyncService.cs
Normal file
320
backend/Infrastructure/Services/MoonrakerPrinterSyncService.cs
Normal file
@@ -0,0 +1,320 @@
|
||||
using Extrudex.Domain.DTOs.Moonraker;
|
||||
using Extrudex.Domain.Entities;
|
||||
using Extrudex.Domain.Enums;
|
||||
using Extrudex.Domain.Interfaces;
|
||||
using Extrudex.Infrastructure.Configuration;
|
||||
using Extrudex.Infrastructure.Data;
|
||||
using Microsoft.EntityFrameworkCore;
|
||||
using Microsoft.Extensions.Logging;
|
||||
|
||||
namespace Extrudex.Infrastructure.Services;
|
||||
|
||||
/// <summary>
/// Service that syncs Moonraker printer status and print job history into the
/// Extrudex database. Queries all active Moonraker printers, fetches their
/// current operational state, and maps completed print jobs to PrintJob and
/// FilamentUsage entities with derived gram calculations.
/// </summary>
public class MoonrakerPrinterSyncService : IMoonrakerPrinterSyncService
{
    private readonly ExtrudexDbContext _dbContext;
    private readonly IMoonrakerClient _moonrakerClient;
    private readonly ILogger<MoonrakerPrinterSyncService> _logger;

    /// <summary>
    /// Creates a new MoonrakerPrinterSyncService.
    /// </summary>
    /// <param name="dbContext">The EF Core database context for persisting updates.</param>
    /// <param name="moonrakerClient">The Moonraker HTTP client for fetching printer data.</param>
    /// <param name="logger">Logger for diagnostic output.</param>
    public MoonrakerPrinterSyncService(
        ExtrudexDbContext dbContext,
        IMoonrakerClient moonrakerClient,
        ILogger<MoonrakerPrinterSyncService> logger)
    {
        _dbContext = dbContext;
        _moonrakerClient = moonrakerClient;
        _logger = logger;
    }

    /// <inheritdoc />
    public async Task<int> SyncAllAsync(CancellationToken cancellationToken = default)
    {
        _logger.LogInformation("Starting Moonraker printer sync cycle");

        // Eagerly load AMS units → slots → spools → materials so FindDefaultSpool
        // can resolve a spool without further queries.
        var printers = await _dbContext.Printers
            .Where(p => p.IsActive && p.ConnectionType == ConnectionType.Moonraker)
            .Include(p => p.AmsUnits)
                .ThenInclude(u => u.Slots)
                    .ThenInclude(s => s.Spool)
                        .ThenInclude(s => s.MaterialBase)
            .Include(p => p.PrintJobs)
            .ToListAsync(cancellationToken);

        if (printers.Count == 0)
        {
            _logger.LogInformation("No active Moonraker printers found — skipping sync");
            return 0;
        }

        _logger.LogInformation("Found {PrinterCount} active Moonraker printer(s) to sync", printers.Count);

        var syncedCount = 0;

        foreach (var printer in printers)
        {
            try
            {
                await SyncPrinterAsync(printer, cancellationToken);
                syncedCount++;
            }
            catch (Exception ex) when (ex is not OperationCanceledException)
            {
                _logger.LogError(ex,
                    "Error syncing printer {PrinterName} ({Host}:{Port})",
                    printer.Name, printer.HostnameOrIp, printer.Port);

                // Mark printer as offline if we can't reach it
                printer.Status = PrinterStatus.Offline;
            }
        }

        // One SaveChanges persists status updates and newly mapped jobs together.
        await _dbContext.SaveChangesAsync(cancellationToken);

        _logger.LogInformation(
            "Moonraker printer sync cycle complete — {SyncedCount}/{TotalCount} printers synced",
            syncedCount, printers.Count);

        return syncedCount;
    }

    /// <summary>
    /// Syncs a single Moonraker printer: updates its status, fetches print history,
    /// and maps new print jobs to database entities.
    /// </summary>
    private async Task SyncPrinterAsync(Printer printer, CancellationToken cancellationToken)
    {
        // Step 1: Fetch printer status
        var printerInfo = await _moonrakerClient.GetPrinterInfoAsync(
            printer.HostnameOrIp, printer.Port, printer.ApiKey, cancellationToken);

        var printStats = await _moonrakerClient.GetPrintStatsAsync(
            printer.HostnameOrIp, printer.Port, printer.ApiKey, cancellationToken);

        // Step 2: Update printer status
        UpdatePrinterStatus(printer, printerInfo, printStats);
        printer.LastSeenAt = DateTime.UtcNow;

        _logger.LogDebug(
            "Printer {PrinterName} status updated to {Status}",
            printer.Name, printer.Status);

        // Step 3: Fetch and map print job history
        var history = await _moonrakerClient.GetPrintHistoryAsync(
            printer.HostnameOrIp, printer.Port, printer.ApiKey,
            limit: 25,
            cancellationToken);

        if (history.Items.Count == 0)
        {
            _logger.LogDebug("No print history returned for printer {PrinterName}", printer.Name);
            return;
        }

        var newJobsCount = await MapPrintJobsAsync(printer, history.Items, cancellationToken);

        if (newJobsCount > 0)
        {
            _logger.LogInformation(
                "Mapped {NewJobsCount} new print job(s) from printer {PrinterName}",
                newJobsCount, printer.Name);
        }
    }

    /// <summary>
    /// Updates the printer's operational status based on Moonraker telemetry.
    /// Maps Klipper/Moonraker state strings to the PrinterStatus enum.
    /// </summary>
    private void UpdatePrinterStatus(
        Printer printer,
        MoonrakerPrinterInfo? printerInfo,
        MoonrakerPrintStats? printStats)
    {
        // Prefer print_stats state — it's the most authoritative
        if (printStats != null)
        {
            printer.Status = printStats.State.ToLowerInvariant() switch
            {
                "printing" => PrinterStatus.Printing,
                "paused" => PrinterStatus.Paused,
                "complete" => PrinterStatus.Idle,
                "standby" => PrinterStatus.Idle,
                "cancelled" => PrinterStatus.Idle,
                "error" => PrinterStatus.Error,
                _ => PrinterStatus.Idle
            };
            return;
        }

        // Fall back to printer_info state
        if (printerInfo != null)
        {
            printer.Status = printerInfo.State.ToLowerInvariant() switch
            {
                "ready" => PrinterStatus.Idle,
                "startup" => PrinterStatus.Idle,
                "shutdown" => PrinterStatus.Offline,
                "error" => PrinterStatus.Error,
                "cancelled" => PrinterStatus.Idle,
                _ => printer.Status // Preserve existing status if unknown
            };
        }
    }

    /// <summary>
    /// Maps Moonraker print job history items to Extrudex PrintJob and FilamentUsage entities.
    /// Only creates records for jobs not already tracked (by Moonraker JobId stored in GcodeFilePath).
    /// </summary>
    private async Task<int> MapPrintJobsAsync(
        Printer printer,
        List<MoonrakerPrintJob> historyItems,
        CancellationToken cancellationToken)
    {
        // Build a set of already-tracked Moonraker JobIds for this printer.
        // We store the Moonraker JobId in the GcodeFilePath field with a "moonraker:" prefix.
        var trackedJobIds = await _dbContext.PrintJobs
            .Where(pj => pj.PrinterId == printer.Id && pj.GcodeFilePath != null && pj.GcodeFilePath.StartsWith("moonraker:"))
            .Select(pj => pj.GcodeFilePath!)
            .ToListAsync(cancellationToken);

        var trackedIdSet = new HashSet<string>(trackedJobIds);
        var newJobsCount = 0;

        // Find the default spool for this printer (first active spool in AMS, or null)
        var defaultSpool = FindDefaultSpool(printer);

        foreach (var moonrakerJob in historyItems)
        {
            var jobIdKey = $"moonraker:{moonrakerJob.JobId}";

            if (trackedIdSet.Contains(jobIdKey))
            {
                continue; // Already tracked — skip
            }

            // Only map completed, cancelled, or errored jobs (not in_progress).
            // In-progress jobs will be captured on the next cycle once they finish.
            // FIX: compared case-insensitively for consistency with the status
            // mapping below, which lower-cases before matching.
            if (string.Equals(moonrakerJob.Status, "in_progress", StringComparison.OrdinalIgnoreCase))
            {
                continue;
            }

            // Map Moonraker job status to JobStatus enum
            var jobStatus = moonrakerJob.Status.ToLowerInvariant() switch
            {
                "completed" => JobStatus.Completed,
                "cancelled" => JobStatus.Cancelled,
                "error" => JobStatus.Failed,
                _ => JobStatus.Completed
            };

            // Derive grams from mm extruded. PLA defaults (1.75 mm diameter,
            // 1.24 g/cm³ density) are used when no spool is available.
            decimal gramsDerived = 0m;
            decimal filamentDiameterMm = 1.75m;
            decimal materialDensity = 1.24m; // PLA default

            if (defaultSpool != null)
            {
                filamentDiameterMm = defaultSpool.FilamentDiameterMm;
                materialDensity = defaultSpool.MaterialBase.DensityGperCm3;
                gramsDerived = CalculateGrams(moonrakerJob.FilamentUsedMm, filamentDiameterMm, materialDensity);
            }
            else if (moonrakerJob.FilamentUsedMm > 0)
            {
                // FIX: use the already-initialized default variables instead of
                // repeating the 1.75m/1.24m literals (single source of truth).
                gramsDerived = CalculateGrams(moonrakerJob.FilamentUsedMm, filamentDiameterMm, materialDensity);
                _logger.LogWarning(
                    "No default spool found for printer {PrinterName} — using PLA defaults for grams derivation on job {JobId}",
                    printer.Name, moonrakerJob.JobId);
            }

            var printJob = new PrintJob
            {
                PrinterId = printer.Id,
                // NOTE(review): Guid.Empty here will violate the FK constraint if
                // PrintJob.SpoolId is a required relationship — verify the schema
                // allows unmatched spools before relying on this fallback.
                SpoolId = defaultSpool?.Id ?? Guid.Empty,
                PrintName = moonrakerJob.Filename,
                GcodeFilePath = jobIdKey,
                MmExtruded = moonrakerJob.FilamentUsedMm,
                GramsDerived = gramsDerived,
                StartedAt = moonrakerJob.StartTime,
                CompletedAt = moonrakerJob.EndTime,
                Status = jobStatus,
                DataSource = DataSource.Moonraker,
                FilamentDiameterAtPrintMm = filamentDiameterMm,
                MaterialDensityAtPrint = materialDensity,
                Notes = $"Auto-imported from Moonraker (JobId: {moonrakerJob.JobId})"
            };

            _dbContext.PrintJobs.Add(printJob);

            // Create a FilamentUsage record if filament was consumed
            if (moonrakerJob.FilamentUsedMm > 0 && defaultSpool != null)
            {
                var usage = new FilamentUsage
                {
                    PrintJob = printJob,
                    SpoolId = defaultSpool.Id,
                    PrinterId = printer.Id,
                    GramsUsed = gramsDerived,
                    MmExtruded = moonrakerJob.FilamentUsedMm,
                    RecordedAt = DateTime.UtcNow,
                    Notes = $"Auto-imported from Moonraker history (JobId: {moonrakerJob.JobId})"
                };

                _dbContext.FilamentUsages.Add(usage);
            }

            newJobsCount++;
            trackedIdSet.Add(jobIdKey); // Prevent duplicates within this batch
        }

        return newJobsCount;
    }

    /// <summary>
    /// Finds the default spool for a printer. Returns the first spool loaded
    /// in an AMS slot, or null if no spool is available.
    /// </summary>
    private static Spool? FindDefaultSpool(Printer printer)
    {
        // Prefer the first active, non-archived spool in an AMS slot
        foreach (var amsUnit in printer.AmsUnits)
        {
            foreach (var slot in amsUnit.Slots)
            {
                if (slot.Spool != null && slot.Spool.IsActive && !slot.Spool.IsArchived)
                {
                    return slot.Spool;
                }
            }
        }

        return null;
    }

    /// <summary>
    /// Calculates derived grams from millimeters extruded using the standard formula:
    /// grams = mm_extruded × cross_section_area × material_density
    /// where cross_section_area = π × (diameter / 2)²
    /// </summary>
    private static decimal CalculateGrams(decimal mmExtruded, decimal diameterMm, decimal densityGperCm3)
    {
        if (mmExtruded <= 0) return 0m;

        // All lengths converted to cm so the result is in grams (g/cm³ × cm³).
        var radiusCm = (double)diameterMm / 2.0 / 10.0; // mm to cm
        var crossSectionAreaCm2 = Math.PI * radiusCm * radiusCm;
        var mmToCm = (double)mmExtruded / 10.0;

        var grams = mmToCm * crossSectionAreaCm2 * (double)densityGperCm3;
        return (decimal)grams;
    }
}
|
||||
390
backend/Infrastructure/Services/MoonrakerUsagePoller.cs
Normal file
390
backend/Infrastructure/Services/MoonrakerUsagePoller.cs
Normal file
@@ -0,0 +1,390 @@
|
||||
using Extrudex.Domain.DTOs.Moonraker;
|
||||
using Extrudex.Domain.Entities;
|
||||
using Extrudex.Domain.Enums;
|
||||
using Extrudex.Domain.Interfaces;
|
||||
using Extrudex.Infrastructure.Data;
|
||||
using Microsoft.EntityFrameworkCore;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using Microsoft.Extensions.Options;
|
||||
|
||||
namespace Extrudex.Infrastructure.Services;
|
||||
|
||||
/// <summary>
/// Configuration options for the Moonraker usage polling service.
/// </summary>
public class MoonrakerPollerOptions
{
    // How often each Moonraker printer is polled for filament usage data.
    /// <summary>
    /// How often to poll each Moonraker printer for filament usage data.
    /// Default: 30 seconds.
    /// </summary>
    public TimeSpan PollInterval { get; set; } = TimeSpan.FromSeconds(30);

    /// <summary>
    /// Timeout for individual Moonraker HTTP requests.
    /// Default: 10 seconds.
    /// </summary>
    public TimeSpan RequestTimeout { get; set; } = TimeSpan.FromSeconds(10);

    /// <summary>
    /// Whether the polling service is enabled. Default: true.
    /// Set to false to disable polling (e.g., in development or testing).
    /// </summary>
    public bool Enabled { get; set; } = true;
}
|
||||
|
||||
/// <summary>
/// Background service that periodically polls Moonraker-connected printers
/// for filament usage data. When a print job is detected as complete,
/// the usage data is persisted to the FilamentUsage table via
/// <see cref="IFilamentUsageService"/>.
///
/// <para>Polling logic:</para>
/// <list type="number">
/// <item>Query the database for all active printers with ConnectionType == Moonraker.</item>
/// <item>For each printer, call <see cref="IMoonrakerClient.GetPrintStatsAsync"/> for live data
/// and <see cref="IMoonrakerClient.GetPrintHistoryAsync"/> for completed job history.</item>
/// <item>If usage data is available and the print state is "complete",
/// create or update a FilamentUsage record.</item>
/// <item>If the printer is unreachable or returns malformed data, log a warning
/// and continue to the next printer (no crash).</item>
/// </list>
///
/// <para>Error handling:</para>
/// <list type="bullet">
/// <item>API unreachable: logged as warning, poller continues for other printers.</item>
/// <item>Malformed response: logged as warning, poller continues.</item>
/// <item>Database errors: logged as error, poller continues.</item>
/// </list>
/// </summary>
// NOTE(review): BackgroundService and GetRequiredService require
// Microsoft.Extensions.Hosting / Microsoft.Extensions.DependencyInjection usings,
// which are not in this file's visible using block — presumably global usings; verify.
public class MoonrakerUsagePoller : BackgroundService
{
    private readonly IServiceScopeFactory _scopeFactory;
    private readonly ILogger<MoonrakerUsagePoller> _logger;
    private readonly MoonrakerPollerOptions _options;

    /// <summary>
    /// Tracks which Moonraker print jobs have already been recorded,
    /// keyed by "printerId:gcodeFileName" to avoid duplicate recording.
    /// NOTE(review): grows without bound for the process lifetime and is lost
    /// on restart — consider pruning or persisting if duplicates matter across restarts.
    /// </summary>
    private readonly HashSet<string> _recordedJobs = new();

    /// <summary>
    /// Creates the poller with its scope factory (for per-cycle scoped services),
    /// logger, and configured options.
    /// </summary>
    public MoonrakerUsagePoller(
        IServiceScopeFactory scopeFactory,
        ILogger<MoonrakerUsagePoller> logger,
        IOptions<MoonrakerPollerOptions> options)
    {
        _scopeFactory = scopeFactory;
        _logger = logger;
        _options = options.Value;
    }

    /// <summary>
    /// Main polling loop: runs one poll cycle, waits for the configured interval,
    /// and repeats until the host requests shutdown.
    /// </summary>
    protected override async Task ExecuteAsync(CancellationToken stoppingToken)
    {
        if (!_options.Enabled)
        {
            _logger.LogInformation("Moonraker usage poller is disabled via configuration.");
            return;
        }

        _logger.LogInformation(
            "Moonraker usage poller starting. Poll interval: {Interval}",
            _options.PollInterval);

        while (!stoppingToken.IsCancellationRequested)
        {
            try
            {
                await PollAllPrintersAsync(stoppingToken);
            }
            catch (OperationCanceledException) when (stoppingToken.IsCancellationRequested)
            {
                // FIX: shutdown mid-cycle previously fell into the generic catch and
                // was logged as an unexpected error. Exit the loop cleanly instead.
                break;
            }
            catch (Exception ex)
            {
                _logger.LogError(ex,
                    "Unexpected error in Moonraker usage poller cycle. Continuing.");
            }

            try
            {
                await Task.Delay(_options.PollInterval, stoppingToken);
            }
            catch (OperationCanceledException)
            {
                // FIX: Task.Delay throws TaskCanceledException on shutdown; previously
                // it escaped ExecuteAsync and the stop log below was unreachable.
                break;
            }
        }

        _logger.LogInformation("Moonraker usage poller stopping.");
    }

    /// <summary>
    /// Runs one full poll cycle: resolves scoped services, loads all active
    /// Moonraker printers, and polls each one in turn.
    /// </summary>
    private async Task PollAllPrintersAsync(CancellationToken cancellationToken)
    {
        // Scoped services (DbContext etc.) must be resolved per cycle, not held by
        // this singleton background service.
        using var scope = _scopeFactory.CreateScope();
        var dbContext = scope.ServiceProvider.GetRequiredService<ExtrudexDbContext>();
        var moonrakerClient = scope.ServiceProvider.GetRequiredService<IMoonrakerClient>();
        var usageService = scope.ServiceProvider.GetRequiredService<IFilamentUsageService>();

        var printers = await dbContext.Printers
            .Where(p => p.IsActive && p.ConnectionType == ConnectionType.Moonraker)
            .ToListAsync(cancellationToken);

        if (printers.Count == 0)
        {
            _logger.LogDebug("No active Moonraker printers found.");
            return;
        }

        _logger.LogDebug("Polling {Count} Moonraker printer(s).", printers.Count);

        foreach (var printer in printers)
        {
            await PollPrinterAsync(
                printer, moonrakerClient, usageService, dbContext, cancellationToken);
        }
    }

    /// <summary>
    /// Polls a single printer: fetches print stats, and if a completed job with
    /// filament usage is found (and not yet recorded), persists a usage record.
    /// All per-printer failures are caught and logged so one bad printer cannot
    /// abort the cycle.
    /// </summary>
    private async Task PollPrinterAsync(
        Printer printer,
        IMoonrakerClient moonrakerClient,
        IFilamentUsageService usageService,
        ExtrudexDbContext dbContext,
        CancellationToken cancellationToken)
    {
        _logger.LogDebug(
            "Polling Moonraker printer {PrinterName} ({Host}:{Port})",
            printer.Name, printer.HostnameOrIp, printer.Port);

        try
        {
            var printStats = await moonrakerClient.GetPrintStatsAsync(
                printer.HostnameOrIp,
                printer.Port,
                printer.ApiKey,
                cancellationToken);

            if (printStats is null)
            {
                _logger.LogDebug(
                    "No print stats available from printer {PrinterName}.", printer.Name);
                return;
            }

            // A successful stats fetch means the printer is reachable.
            printer.LastSeenAt = DateTime.UtcNow;
            await dbContext.SaveChangesAsync(cancellationToken);

            _logger.LogDebug(
                "Printer {PrinterName}: state={State}, filament={Mm}mm, file={File}",
                printer.Name, printStats.State, printStats.FilamentUsedMm, printStats.Filename);

            decimal mmExtruded = printStats.FilamentUsedMm;
            if (mmExtruded <= 0)
            {
                _logger.LogDebug(
                    "Printer {PrinterName} has no filament usage to record.", printer.Name);
                return;
            }

            if (!IsCompleteState(printStats.State))
            {
                _logger.LogDebug(
                    "Printer {PrinterName} print state '{State}' is not complete; skipping.",
                    printer.Name, printStats.State);
                return;
            }

            string gcodeFileName = printStats.Filename ?? $"unknown-{Guid.NewGuid():N}";
            var deduplicationKey = $"{printer.Id}:{gcodeFileName}";
            if (_recordedJobs.Contains(deduplicationKey))
            {
                _logger.LogDebug(
                    "Printer {PrinterName} job '{File}' already recorded; skipping.",
                    printer.Name, gcodeFileName);
                return;
            }

            // History is best-effort: it only supplies start/end timestamps.
            DateTime? startedAt = null;
            DateTime? completedAt = null;
            try
            {
                var history = await moonrakerClient.GetPrintHistoryAsync(
                    printer.HostnameOrIp, printer.Port, printer.ApiKey,
                    limit: 1, cancellationToken);

                if (history.Items.Count > 0)
                {
                    var latestJob = history.Items[0];
                    startedAt = latestJob.StartTime;
                    completedAt = latestJob.EndTime;
                }
            }
            catch (Exception ex)
            {
                _logger.LogDebug(ex,
                    "Could not fetch history for printer {PrinterName}; proceeding with stats only.",
                    printer.Name);
            }

            var printJob = await FindOrCreatePrintJobAsync(
                dbContext, printer, mmExtruded, gcodeFileName,
                startedAt, completedAt, cancellationToken);

            if (printJob is null)
            {
                _logger.LogWarning(
                    "Could not find or create print job for printer {PrinterName}. No active spool found.",
                    printer.Name);
                return;
            }

            var spool = await dbContext.Spools.FindAsync(
                new object[] { printJob.SpoolId }, cancellationToken);

            var gramsUsed = CalculateGramsUsed(mmExtruded, spool);

            await usageService.RecordUsageAsync(
                printJobId: printJob.Id,
                spoolId: printJob.SpoolId,
                printerId: printer.Id,
                gramsUsed: gramsUsed,
                mmExtruded: mmExtruded,
                notes: $"Moonraker auto-recorded: {gcodeFileName}",
                cancellationToken: cancellationToken);

            // Only mark recorded after a successful persist, so failures retry next cycle.
            _recordedJobs.Add(deduplicationKey);

            _logger.LogInformation(
                "Recorded Moonraker usage for printer {PrinterName}: {Mm}mm / {Grams}g, job '{File}'",
                printer.Name, mmExtruded, gramsUsed, gcodeFileName);
        }
        catch (HttpRequestException ex)
        {
            _logger.LogWarning(ex,
                "Moonraker API unreachable for printer {PrinterName} ({Host}:{Port}). Will retry next cycle.",
                printer.Name, printer.HostnameOrIp, printer.Port);
        }
        catch (TaskCanceledException) when (cancellationToken.IsCancellationRequested)
        {
            // Host shutdown — propagate so the outer loop can stop.
            throw;
        }
        catch (TaskCanceledException ex)
        {
            // HttpClient signals timeouts as TaskCanceledException without our token being set.
            _logger.LogWarning(ex,
                "Moonraker request timed out for printer {PrinterName} ({Host}:{Port}).",
                printer.Name, printer.HostnameOrIp, printer.Port);
        }
        catch (Exception ex)
        {
            _logger.LogError(ex,
                "Unexpected error polling Moonraker printer {PrinterName}. Continuing to next printer.",
                printer.Name);
        }
    }

    /// <summary>
    /// True when the Klipper print state indicates a finished job.
    /// </summary>
    private static bool IsCompleteState(string state) =>
        state.Equals("complete", StringComparison.OrdinalIgnoreCase) ||
        state.Equals("completed", StringComparison.OrdinalIgnoreCase);

    /// <summary>
    /// Finds an existing Moonraker-sourced PrintJob for this printer/file and
    /// refreshes its usage fields, or creates a new completed PrintJob against
    /// the printer's active spool. Returns null when no spool can be resolved.
    /// </summary>
    private async Task<PrintJob?> FindOrCreatePrintJobAsync(
        ExtrudexDbContext dbContext,
        Printer printer,
        decimal mmExtruded,
        string gcodeFileName,
        DateTime? startedAt,
        DateTime? completedAt,
        CancellationToken cancellationToken)
    {
        if (!string.IsNullOrEmpty(gcodeFileName))
        {
            var existingJob = await dbContext.PrintJobs
                .Where(j => j.PrinterId == printer.Id &&
                            j.GcodeFilePath == gcodeFileName &&
                            j.DataSource == DataSource.Moonraker &&
                            j.Status != JobStatus.Cancelled)
                .OrderByDescending(j => j.CreatedAt)
                .FirstOrDefaultAsync(cancellationToken);

            if (existingJob is not null)
            {
                // Refresh the existing job's usage figures and mark it complete.
                existingJob.MmExtruded = mmExtruded;
                existingJob.GramsDerived = CalculateGramsUsed(
                    mmExtruded,
                    await dbContext.Spools.FindAsync(
                        new object[] { existingJob.SpoolId }, cancellationToken));
                existingJob.Status = JobStatus.Completed;
                existingJob.CompletedAt = completedAt ?? DateTime.UtcNow;
                existingJob.StartedAt ??= startedAt;
                await dbContext.SaveChangesAsync(cancellationToken);
                return existingJob;
            }
        }

        var spool = await FindActiveSpoolForPrinterAsync(dbContext, printer, cancellationToken);
        if (spool is null) return null;

        var gramsDerived = CalculateGramsUsed(mmExtruded, spool);

        var newJob = new PrintJob
        {
            PrinterId = printer.Id,
            SpoolId = spool.Id,
            PrintName = gcodeFileName ?? "Moonraker Print",
            GcodeFilePath = gcodeFileName,
            MmExtruded = mmExtruded,
            GramsDerived = gramsDerived,
            FilamentDiameterAtPrintMm = spool.FilamentDiameterMm,
            MaterialDensityAtPrint = GetMaterialDensity(spool),
            DataSource = DataSource.Moonraker,
            Status = JobStatus.Completed,
            StartedAt = startedAt ?? DateTime.UtcNow,
            CompletedAt = completedAt ?? DateTime.UtcNow,
            Notes = "Auto-created by Moonraker usage poller"
        };

        dbContext.PrintJobs.Add(newJob);
        await dbContext.SaveChangesAsync(cancellationToken);

        return newJob;
    }

    /// <summary>
    /// Resolves the spool to attribute usage to: first an active spool loaded in
    /// one of this printer's AMS slots, otherwise the active spool with the most
    /// remaining weight across the whole inventory.
    /// </summary>
    private static async Task<Spool?> FindActiveSpoolForPrinterAsync(
        ExtrudexDbContext dbContext,
        Printer printer,
        CancellationToken cancellationToken)
    {
        var amsSpool = await dbContext.AmsSlots
            .Include(s => s.Spool)
                .ThenInclude(s => s!.MaterialBase)
            .Include(s => s.AmsUnit)
            .Where(s => s.AmsUnit.PrinterId == printer.Id && s.Spool != null && s.Spool.IsActive)
            .Select(s => s.Spool)
            .FirstOrDefaultAsync(cancellationToken);

        if (amsSpool is not null) return amsSpool;

        return await dbContext.Spools
            .Include(s => s.MaterialBase)
            .Where(s => s.IsActive)
            .OrderByDescending(s => s.WeightRemainingGrams)
            .FirstOrDefaultAsync(cancellationToken);
    }

    /// <summary>
    /// Converts mm of extruded filament to grams using the spool's diameter and
    /// material density: volume (cm³) × density (g/cm³), rounded to 2 decimals.
    /// Returns 0 when no spool is available to supply the physical parameters.
    /// </summary>
    private static decimal CalculateGramsUsed(decimal mmExtruded, Spool? spool)
    {
        if (spool is null) return 0m;
        var diameterMm = spool.FilamentDiameterMm;
        var densityGcm3 = GetMaterialDensity(spool);
        var radiusMm = diameterMm / 2m;
        var crossSectionArea = Math.PI * (double)radiusMm * (double)radiusMm;
        var volumeMm3 = (double)mmExtruded * crossSectionArea;
        var volumeCm3 = volumeMm3 / 1000.0; // 1 cm³ = 1000 mm³
        var grams = volumeCm3 * (double)densityGcm3;
        return Math.Round((decimal)grams, 2);
    }

    /// <summary>
    /// Looks up a typical density (g/cm³) for the spool's material by name,
    /// defaulting to PLA's 1.24 for unknown or missing materials.
    /// </summary>
    private static decimal GetMaterialDensity(Spool? spool)
    {
        return spool?.MaterialBase?.Name?.ToUpperInvariant() switch
        {
            "PLA" => 1.24m,
            "PETG" => 1.27m,
            "ABS" => 1.04m,
            "ASA" => 1.07m,
            "TPU" => 1.21m,
            "NYLON" or "PA" => 1.13m,
            "PC" => 1.20m,
            _ => 1.24m
        };
    }
}
|
||||
81
backend/Infrastructure/Services/UsageLogService.cs
Normal file
81
backend/Infrastructure/Services/UsageLogService.cs
Normal file
@@ -0,0 +1,81 @@
|
||||
using Extrudex.Domain.Entities;
|
||||
using Extrudex.Domain.Enums;
|
||||
using Extrudex.Domain.Interfaces;
|
||||
using Extrudex.Infrastructure.Data;
|
||||
using Microsoft.EntityFrameworkCore;
|
||||
|
||||
namespace Extrudex.Infrastructure.Services;
|
||||
|
||||
/// <summary>
|
||||
/// Implementation of <see cref="IUsageLogService"/> that persists usage entries
|
||||
/// to the usage_logs table via EF Core.
|
||||
/// </summary>
|
||||
public class UsageLogService : IUsageLogService
|
||||
{
|
||||
private readonly ExtrudexDbContext _dbContext;
|
||||
|
||||
/// <summary>
|
||||
/// Initializes a new instance of the <see cref="UsageLogService"/> class.
|
||||
/// </summary>
|
||||
/// <param name="dbContext">The EF Core database context for data persistence.</param>
|
||||
public UsageLogService(ExtrudexDbContext dbContext)
|
||||
{
|
||||
_dbContext = dbContext;
|
||||
}
|
||||
|
||||
/// <inheritdoc/>
|
||||
public async Task<UsageLog> RecordUsageAsync(
|
||||
Guid spoolId,
|
||||
decimal gramsUsed,
|
||||
DataSource dataSource,
|
||||
Guid? printerId = null,
|
||||
Guid? printJobId = null,
|
||||
decimal? mmExtruded = null,
|
||||
DateTime? usageTimestamp = null,
|
||||
string? notes = null)
|
||||
{
|
||||
var entry = new UsageLog
|
||||
{
|
||||
SpoolId = spoolId,
|
||||
GramsUsed = gramsUsed,
|
||||
DataSource = dataSource,
|
||||
PrinterId = printerId,
|
||||
PrintJobId = printJobId,
|
||||
MmExtruded = mmExtruded,
|
||||
UsageTimestamp = usageTimestamp ?? DateTime.UtcNow,
|
||||
Notes = notes
|
||||
};
|
||||
|
||||
_dbContext.UsageLogs.Add(entry);
|
||||
await _dbContext.SaveChangesAsync();
|
||||
|
||||
return entry;
|
||||
}
|
||||
|
||||
/// <inheritdoc/>
|
||||
public async Task<IEnumerable<UsageLog>> GetBySpoolAsync(Guid spoolId, CancellationToken cancellationToken = default)
|
||||
{
|
||||
return await _dbContext.UsageLogs
|
||||
.Where(u => u.SpoolId == spoolId)
|
||||
.OrderByDescending(u => u.UsageTimestamp)
|
||||
.ToListAsync(cancellationToken);
|
||||
}
|
||||
|
||||
/// <inheritdoc/>
|
||||
public async Task<IEnumerable<UsageLog>> GetByPrinterAsync(Guid printerId, CancellationToken cancellationToken = default)
|
||||
{
|
||||
return await _dbContext.UsageLogs
|
||||
.Where(u => u.PrinterId == printerId)
|
||||
.OrderByDescending(u => u.UsageTimestamp)
|
||||
.ToListAsync(cancellationToken);
|
||||
}
|
||||
|
||||
/// <inheritdoc/>
|
||||
public async Task<IEnumerable<UsageLog>> GetByPrintJobAsync(Guid printJobId, CancellationToken cancellationToken = default)
|
||||
{
|
||||
return await _dbContext.UsageLogs
|
||||
.Where(u => u.PrintJobId == printJobId)
|
||||
.OrderByDescending(u => u.UsageTimestamp)
|
||||
.ToListAsync(cancellationToken);
|
||||
}
|
||||
}
|
||||
@@ -1,6 +1,9 @@
|
||||
using System.Reflection;
|
||||
using Extrudex.API.Filters;
|
||||
using Extrudex.API.Hubs;
|
||||
using Extrudex.API.Jobs;
|
||||
using Extrudex.Domain.Interfaces;
|
||||
using Extrudex.Infrastructure.Configuration;
|
||||
using Extrudex.Infrastructure.Data;
|
||||
using Extrudex.Infrastructure.Services;
|
||||
using FluentValidation;
|
||||
@@ -23,7 +26,10 @@ builder.Services.AddDbContext<ExtrudexDbContext>(options =>
|
||||
options.UseNpgsql(connectionString));
|
||||
|
||||
// ── API Services ───────────────────────────────────────────
|
||||
builder.Services.AddControllers();
|
||||
builder.Services.AddControllers(options =>
|
||||
{
|
||||
options.Filters.AddService<FluentValidationFilter>();
|
||||
});
|
||||
builder.Services.AddEndpointsApiExplorer();
|
||||
builder.Services.AddSwaggerGen(c =>
|
||||
{
|
||||
@@ -46,10 +52,31 @@ builder.Services.AddSwaggerGen(c =>
|
||||
// ── QR Code Generation ──────────────────────────────────────
|
||||
builder.Services.AddSingleton<IQrCodeService, QrCodeService>();
|
||||
|
||||
// ── Cost Per Print Calculation ─────────────────────────────
|
||||
builder.Services.AddScoped<ICostPerPrintService, CostPerPrintService>();
|
||||
|
||||
// ── Low Stock Detection ────────────────────────────────────
|
||||
builder.Services.AddSingleton<ILowStockDetector, LowStockDetector>();
|
||||
|
||||
// ── Usage Logging ───────────────────────────────────────────
|
||||
builder.Services.AddScoped<IUsageLogService, UsageLogService>();
|
||||
|
||||
// ── Filament Usage Service ──────────────────────────────────
|
||||
builder.Services.AddScoped<IFilamentUsageService, FilamentUsageService>();
|
||||
|
||||
// ── Moonraker Usage Poller (Background Service) ─────────────
|
||||
builder.Services.Configure<MoonrakerPollerOptions>(
|
||||
builder.Configuration.GetSection("MoonrakerPoller"));
|
||||
builder.Services.AddHostedService<MoonrakerUsagePoller>();
|
||||
|
||||
// ── FluentValidation ──────────────────────────────────────
|
||||
// Registers all validators from the API assembly into DI.
|
||||
builder.Services.AddValidatorsFromAssembly(Assembly.GetExecutingAssembly());
|
||||
|
||||
// Register the FluentValidation action filter so validators run automatically
|
||||
// on all API controller actions before the action executes.
|
||||
builder.Services.AddScoped<FluentValidationFilter>();
|
||||
|
||||
// ── CORS (kiosk + remote browser) ─────────────────────────
|
||||
// AllowAnyOrigin disallows credentials by spec; this is fine for
|
||||
// REST API calls. SignalR WebSockets negotiate without credentials
|
||||
@@ -69,6 +96,26 @@ builder.Services.AddCors(options =>
|
||||
// ── SignalR (real-time printer updates) ────────────────────
|
||||
builder.Services.AddSignalR();
|
||||
|
||||
// ── Filament Usage Sync (Background Job) ──────────────────
|
||||
builder.Services.Configure<FilamentUsageSyncOptions>(
|
||||
builder.Configuration.GetSection(FilamentUsageSyncOptions.SectionName));
|
||||
builder.Services.AddHttpClient<IMoonrakerClient, MoonrakerClient>(client =>
|
||||
{
|
||||
client.DefaultRequestHeaders.Add("User-Agent", "Extrudex/1.0");
|
||||
});
|
||||
builder.Services.AddScoped<IFilamentUsageSyncService, FilamentUsageSyncService>();
|
||||
builder.Services.AddHostedService<FilamentUsageSyncJob>();
|
||||
|
||||
// ── Moonraker Printer Sync (Background Service) ──────────
|
||||
builder.Services.Configure<MoonrakerPrinterSyncOptions>(
|
||||
builder.Configuration.GetSection(MoonrakerPrinterSyncOptions.SectionName));
|
||||
builder.Services.AddScoped<IMoonrakerPrinterSyncService, MoonrakerPrinterSyncService>();
|
||||
builder.Services.AddHostedService<MoonrakerPrinterSyncJob>();
|
||||
|
||||
// ── Health Checks ───────────────────────────────────────────
|
||||
builder.Services.AddHealthChecks()
|
||||
.AddNpgSql(connectionString);
|
||||
|
||||
var app = builder.Build();
|
||||
|
||||
// ── Middleware ──────────────────────────────────────────────
|
||||
@@ -85,6 +132,9 @@ app.MapControllers();
|
||||
// ── Hub Endpoints ───────────────────────────────────────────
|
||||
app.MapHub<PrinterHub>("/hubs/printer");
|
||||
|
||||
// ── Health Check Endpoint ──────────────────────────────────
|
||||
app.MapHealthChecks("/health");
|
||||
|
||||
app.Run();
|
||||
|
||||
// Helper: builds a connection string from individual env vars.
|
||||
|
||||
@@ -8,5 +8,10 @@
|
||||
},
|
||||
"ConnectionStrings": {
|
||||
"ExtrudexDb": "Host=localhost;Port=5432;Database=extrudex_dev;Username=extrudex;Password=changeme"
|
||||
},
|
||||
"FilamentUsageSync": {
|
||||
"PollingInterval": "00:01:00",
|
||||
"RequestTimeout": "00:00:30",
|
||||
"Enabled": true
|
||||
}
|
||||
}
|
||||
@@ -9,5 +9,25 @@
|
||||
"AllowedHosts": "*",
|
||||
"ConnectionStrings": {
|
||||
"ExtrudexDb": "Host=localhost;Port=5432;Database=extrudex;Username=extrudex;Password=changeme"
|
||||
},
|
||||
"FilamentUsageSync": {
|
||||
"PollingInterval": "00:05:00",
|
||||
"RequestTimeout": "00:00:30",
|
||||
"Enabled": true
|
||||
},
|
||||
"MoonrakerPrinterSync": {
|
||||
"PollingInterval": "00:01:00",
|
||||
"RequestTimeout": "00:00:15",
|
||||
"Enabled": true,
|
||||
"HistoryBatchSize": 25
|
||||
},
|
||||
"FilamentAlerts": {
|
||||
"LowStockThresholdPercent": 20
|
||||
},
|
||||
"MoonrakerPoller": {
|
||||
"Enabled": true,
|
||||
"PollInterval": "00:00:30",
|
||||
"RequestTimeout": "00:00:10"
|
||||
}
|
||||
}
|
||||
}
|
||||
110
backend/cmd/server/main.go
Normal file
110
backend/cmd/server/main.go
Normal file
@@ -0,0 +1,110 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"context"
|
||||
"log/slog"
|
||||
"net/http"
|
||||
"os"
|
||||
"os/signal"
|
||||
"syscall"
|
||||
"time"
|
||||
|
||||
"github.com/CubeCraft-Creations/Extrudex/backend/internal/config"
|
||||
"github.com/CubeCraft-Creations/Extrudex/backend/internal/db"
|
||||
"github.com/CubeCraft-Creations/Extrudex/backend/internal/repositories"
|
||||
"github.com/CubeCraft-Creations/Extrudex/backend/internal/router"
|
||||
"github.com/CubeCraft-Creations/Extrudex/backend/internal/sse"
|
||||
"github.com/CubeCraft-Creations/Extrudex/backend/internal/workers"
|
||||
)
|
||||
|
||||
func main() {
|
||||
// Setup structured logging
|
||||
slog.SetDefault(slog.New(slog.NewTextHandler(os.Stdout, &slog.HandlerOptions{
|
||||
Level: slog.LevelInfo,
|
||||
})))
|
||||
|
||||
// Load configuration
|
||||
cfg, err := config.Load()
|
||||
if err != nil {
|
||||
slog.Error("failed to load config", "error", err)
|
||||
os.Exit(1)
|
||||
}
|
||||
|
||||
slog.Info("config loaded", "port", cfg.Port, "cors_origin", cfg.CorsOrigin)
|
||||
|
||||
// Connect to database
|
||||
dbPool, err := db.NewPool(cfg.DatabaseURL)
|
||||
if err != nil {
|
||||
slog.Error("failed to connect to database", "error", err)
|
||||
os.Exit(1)
|
||||
}
|
||||
defer db.ClosePool(dbPool)
|
||||
|
||||
slog.Info("database connected")
|
||||
|
||||
// Repositories (for background workers)
|
||||
printerRepo := repositories.NewPrinterRepository(dbPool)
|
||||
jobRepo := repositories.NewPrintJobRepository(dbPool)
|
||||
usageLogRepo := repositories.NewUsageLogRepository(dbPool)
|
||||
|
||||
// Create SSE broadcaster and start it
|
||||
sseBC := sse.NewBroadcaster(128)
|
||||
sseBC.Start()
|
||||
defer sseBC.Stop()
|
||||
|
||||
slog.Info("sse broadcaster started")
|
||||
|
||||
// Start background workers
|
||||
mrCfg := workers.DefaultMoonrakerPollerConfig()
|
||||
mrPoller := workers.NewMoonrakerPoller(mrCfg, dbPool, printerRepo, jobRepo, usageLogRepo, sseBC)
|
||||
mrPoller.Start()
|
||||
defer mrPoller.Stop()
|
||||
|
||||
mqttCfg := workers.DefaultMQTTSubscriberConfig()
|
||||
mqttSub := workers.NewMQTTSubscriber(mqttCfg, dbPool, printerRepo, usageLogRepo, sseBC)
|
||||
mqttSub.Start()
|
||||
defer mqttSub.Stop()
|
||||
|
||||
slog.Info("background workers started")
|
||||
|
||||
// Create router
|
||||
r := router.New(cfg, dbPool, sseBC)
|
||||
|
||||
// Create HTTP server
|
||||
// WriteTimeout is 0 for SSE support — the Chi middleware.Timeout(60s)
|
||||
// handles request-level timeouts on non-SSE routes.
|
||||
server := &http.Server{
|
||||
Addr: ":" + cfg.Port,
|
||||
Handler: r,
|
||||
ReadTimeout: 15 * time.Second,
|
||||
WriteTimeout: 0, // disabled for SSE long-lived connections
|
||||
IdleTimeout: 60 * time.Second,
|
||||
}
|
||||
|
||||
// Start server in goroutine
|
||||
go func() {
|
||||
slog.Info("server starting", "addr", server.Addr)
|
||||
if err := server.ListenAndServe(); err != nil && err != http.ErrServerClosed {
|
||||
slog.Error("server error", "error", err)
|
||||
os.Exit(1)
|
||||
}
|
||||
}()
|
||||
|
||||
// Wait for shutdown signal
|
||||
quit := make(chan os.Signal, 1)
|
||||
signal.Notify(quit, syscall.SIGINT, syscall.SIGTERM)
|
||||
<-quit
|
||||
|
||||
slog.Info("server shutting down")
|
||||
|
||||
// Graceful shutdown
|
||||
ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second)
|
||||
defer cancel()
|
||||
|
||||
if err := server.Shutdown(ctx); err != nil {
|
||||
slog.Error("server shutdown error", "error", err)
|
||||
}
|
||||
|
||||
db.ClosePool(dbPool)
|
||||
slog.Info("server stopped")
|
||||
}
|
||||
18
backend/go.mod
Normal file
18
backend/go.mod
Normal file
@@ -0,0 +1,18 @@
|
||||
module github.com/CubeCraft-Creations/Extrudex/backend
|
||||
|
||||
go 1.24
|
||||
|
||||
require (
|
||||
github.com/go-chi/chi/v5 v5.2.0
|
||||
github.com/jackc/pgx/v5 v5.7.4
|
||||
github.com/kelseyhightower/envconfig v1.4.0
|
||||
)
|
||||
|
||||
require (
|
||||
github.com/jackc/pgpassfile v1.0.0 // indirect
|
||||
github.com/jackc/pgservicefile v0.0.0-20240606120523-5a60cdf6a761 // indirect
|
||||
github.com/jackc/puddle/v2 v2.2.2 // indirect
|
||||
golang.org/x/crypto v0.31.0 // indirect
|
||||
golang.org/x/sync v0.10.0 // indirect
|
||||
golang.org/x/text v0.21.0 // indirect
|
||||
)
|
||||
32
backend/go.sum
Normal file
32
backend/go.sum
Normal file
@@ -0,0 +1,32 @@
|
||||
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
|
||||
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||
github.com/go-chi/chi/v5 v5.2.0 h1:Aj1EtB0qR2Rdo2dG4O94RIU35w2lvQSj6BRA4+qwFL0=
|
||||
github.com/go-chi/chi/v5 v5.2.0/go.mod h1:DslCQbL2OYiznFReuXYUmQ2hGd1aDpCnlMNITLSKoi8=
|
||||
github.com/jackc/pgpassfile v1.0.0 h1:/6Hmqy13Ss2zCq62VdNG8tM1wchn8zjSGOBJ6icpsIM=
|
||||
github.com/jackc/pgpassfile v1.0.0/go.mod h1:CEx0iS5ambNFdcRtxPj5JhEz+xB6uRky5eyVu/W2HEg=
|
||||
github.com/jackc/pgservicefile v0.0.0-20240606120523-5a60cdf6a761 h1:iCEnooe7UlwOQYpKFhBabPMi4aNAfoODPEFNiAnClxo=
|
||||
github.com/jackc/pgservicefile v0.0.0-20240606120523-5a60cdf6a761/go.mod h1:5TJZWKEWniPve33vlWYSoGYefn3gLQRzjfDlhSJ9ZKM=
|
||||
github.com/jackc/pgx/v5 v5.7.4 h1:9wKznZrhWa2QiHL+NjTSPP6yjl3451BX3imWDnokYlg=
|
||||
github.com/jackc/pgx/v5 v5.7.4/go.mod h1:ncY89UGWxg82EykZUwSpUKEfccBGGYq1xjrOpsbsfGQ=
|
||||
github.com/jackc/puddle/v2 v2.2.2 h1:PR8nw+E/1w0GLuRFSmiioY6UooMp6KJv0/61nB7icHo=
|
||||
github.com/jackc/puddle/v2 v2.2.2/go.mod h1:vriiEXHvEE654aYKXXjOvZM39qJ0q+azkZFrfEOc3H4=
|
||||
github.com/kelseyhightower/envconfig v1.4.0 h1:Im6hONhd3pLkfDFsbRgu68RDNkGF1r3dvMUtDTo2cv8=
|
||||
github.com/kelseyhightower/envconfig v1.4.0/go.mod h1:cccZRl6mQpaq41TPp5QxidR+Sa3axMbJDNb//FQX6Gg=
|
||||
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
|
||||
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
|
||||
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
|
||||
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
|
||||
github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
|
||||
github.com/stretchr/testify v1.8.1 h1:w7B6lhMri9wdJUVmEZPGGhZzrYTPvgJArz7wNPgYKsk=
|
||||
github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4=
|
||||
golang.org/x/crypto v0.31.0 h1:ihbySMvVjLAeSH1IbfcRTkD/iNscyz8rGzjF/E5hV6U=
|
||||
golang.org/x/crypto v0.31.0/go.mod h1:kDsLvtWBEx7MV9tJOj9bnXsPbxwJQ6csT/x4KIN4Ssk=
|
||||
golang.org/x/sync v0.10.0 h1:3NQrjDixjgGwUOCaF8w2+VYHv0Ve/vGYSbdkTa98gmQ=
|
||||
golang.org/x/sync v0.10.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
|
||||
golang.org/x/text v0.21.0 h1:zyQAAkrwaneQ066sspRyJaG9VNi/YJ1NfzcGB3hZ/qo=
|
||||
golang.org/x/text v0.21.0/go.mod h1:4IBbMaMmOPCJ8SecivzSH54+73PCFmPWxNTLm+vZkEQ=
|
||||
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
|
||||
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
|
||||
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
|
||||
161
backend/internal/clients/moonraker.go
Normal file
161
backend/internal/clients/moonraker.go
Normal file
@@ -0,0 +1,161 @@
|
||||
// Package clients provides third-party printer integrations.
|
||||
package clients
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"log/slog"
|
||||
"net/http"
|
||||
"time"
|
||||
)
|
||||
|
||||
// MoonrakerPrinterInfo represents the response from /api/printer/info.
|
||||
type MoonrakerPrinterInfo struct {
|
||||
State string `json:"state"`
|
||||
Hostname string `json:"hostname,omitempty"`
|
||||
SoftwareVersion string `json:"software_version,omitempty"`
|
||||
}
|
||||
|
||||
// MoonrakerPrintStats represents the response from /api/printer/print_stats.
|
||||
type MoonrakerPrintStats struct {
|
||||
State string `json:"state"`
|
||||
Filename string `json:"filename,omitempty"`
|
||||
FilamentUsedMm float64 `json:"filament_used,omitempty"`
|
||||
TotalDuration float64 `json:"total_duration,omitempty"`
|
||||
PrintDuration float64 `json:"print_duration,omitempty"`
|
||||
Message string `json:"message,omitempty"`
|
||||
}
|
||||
|
||||
// MoonrakerPrintJob represents a single job from the history API.
|
||||
type MoonrakerPrintJob struct {
|
||||
JobID string `json:"job_id,omitempty"`
|
||||
Filename string `json:"filename"`
|
||||
Status string `json:"status"`
|
||||
StartTime time.Time `json:"start_time"`
|
||||
EndTime time.Time `json:"end_time,omitempty"`
|
||||
FilamentUsedMm float64 `json:"filament_used,omitempty"`
|
||||
TotalDuration float64 `json:"total_duration,omitempty"`
|
||||
}
|
||||
|
||||
// MoonrakerHistoryResponse represents the response from /api/server/history/job.
|
||||
type MoonrakerHistoryResponse struct {
|
||||
Items []MoonrakerPrintJob `json:"jobs"`
|
||||
}
|
||||
|
||||
// MoonrakerClient is an HTTP client for the Moonraker API.
|
||||
type MoonrakerClient struct {
|
||||
HTTPClient *http.Client
|
||||
}
|
||||
|
||||
// NewMoonrakerClient creates a MoonrakerClient with the given request timeout.
|
||||
func NewMoonrakerClient(timeout time.Duration) *MoonrakerClient {
|
||||
return &MoonrakerClient{
|
||||
HTTPClient: &http.Client{Timeout: timeout},
|
||||
}
|
||||
}
|
||||
|
||||
// baseURL builds the Moonraker base URL from host and port.
|
||||
func (c *MoonrakerClient) baseURL(host string, port int) string {
|
||||
if port == 0 {
|
||||
port = 80
|
||||
}
|
||||
return fmt.Sprintf("http://%s:%d", host, port)
|
||||
}
|
||||
|
||||
// GetPrinterInfo fetches printer info from Moonraker.
|
||||
func (c *MoonrakerClient) GetPrinterInfo(ctx context.Context, host string, port int, apiKey string) (*MoonrakerPrinterInfo, error) {
|
||||
url := c.baseURL(host, port) + "/api/printer/info"
|
||||
req, err := http.NewRequestWithContext(ctx, http.MethodGet, url, nil)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if apiKey != "" {
|
||||
req.Header.Set("X-Api-Key", apiKey)
|
||||
}
|
||||
|
||||
resp, err := c.HTTPClient.Do(req)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("moonraker getPrinterInfo request failed: %w", err)
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
|
||||
if resp.StatusCode != http.StatusOK {
|
||||
return nil, fmt.Errorf("moonraker getPrinterInfo returned status %d", resp.StatusCode)
|
||||
}
|
||||
|
||||
var body struct {
|
||||
Result MoonrakerPrinterInfo `json:"result"`
|
||||
}
|
||||
if err := json.NewDecoder(resp.Body).Decode(&body); err != nil {
|
||||
return nil, fmt.Errorf("moonraker getPrinterInfo decode failed: %w", err)
|
||||
}
|
||||
|
||||
slog.Debug("moonraker printer info", "host", host, "state", body.Result.State)
|
||||
return &body.Result, nil
|
||||
}
|
||||
|
||||
// GetPrintStats fetches current print statistics from Moonraker.
|
||||
func (c *MoonrakerClient) GetPrintStats(ctx context.Context, host string, port int, apiKey string) (*MoonrakerPrintStats, error) {
|
||||
url := c.baseURL(host, port) + "/api/printer/print_stats"
|
||||
req, err := http.NewRequestWithContext(ctx, http.MethodGet, url, nil)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if apiKey != "" {
|
||||
req.Header.Set("X-Api-Key", apiKey)
|
||||
}
|
||||
|
||||
resp, err := c.HTTPClient.Do(req)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("moonraker getPrintStats request failed: %w", err)
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
|
||||
if resp.StatusCode != http.StatusOK {
|
||||
return nil, fmt.Errorf("moonraker getPrintStats returned status %d", resp.StatusCode)
|
||||
}
|
||||
|
||||
var body struct {
|
||||
Result MoonrakerPrintStats `json:"result"`
|
||||
}
|
||||
if err := json.NewDecoder(resp.Body).Decode(&body); err != nil {
|
||||
return nil, fmt.Errorf("moonraker getPrintStats decode failed: %w", err)
|
||||
}
|
||||
|
||||
slog.Debug("moonraker print stats", "host", host, "state", body.Result.State, "filename", body.Result.Filename)
|
||||
return &body.Result, nil
|
||||
}
|
||||
|
||||
// GetPrintHistory fetches completed print job history from Moonraker.
|
||||
func (c *MoonrakerClient) GetPrintHistory(ctx context.Context, host string, port int, apiKey string, limit int) (*MoonrakerHistoryResponse, error) {
|
||||
if limit <= 0 {
|
||||
limit = 25
|
||||
}
|
||||
url := fmt.Sprintf("%s/api/server/history/job?limit=%d", c.baseURL(host, port), limit)
|
||||
req, err := http.NewRequestWithContext(ctx, http.MethodGet, url, nil)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if apiKey != "" {
|
||||
req.Header.Set("X-Api-Key", apiKey)
|
||||
}
|
||||
|
||||
resp, err := c.HTTPClient.Do(req)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("moonraker getPrintHistory request failed: %w", err)
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
|
||||
if resp.StatusCode != http.StatusOK {
|
||||
return nil, fmt.Errorf("moonraker getPrintHistory returned status %d", resp.StatusCode)
|
||||
}
|
||||
|
||||
var body MoonrakerHistoryResponse
|
||||
if err := json.NewDecoder(resp.Body).Decode(&body); err != nil {
|
||||
return nil, fmt.Errorf("moonraker getPrintHistory decode failed: %w", err)
|
||||
}
|
||||
|
||||
slog.Debug("moonraker print history", "host", host, "count", len(body.Items))
|
||||
return &body, nil
|
||||
}
|
||||
119
backend/internal/clients/mqtt.go
Normal file
119
backend/internal/clients/mqtt.go
Normal file
@@ -0,0 +1,119 @@
|
||||
// Package clients provides third-party printer integrations.
|
||||
package clients
|
||||
|
||||
import (
|
||||
"context"
|
||||
"crypto/tls"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"log/slog"
|
||||
"time"
|
||||
|
||||
mqtt "github.com/eclipse/paho.mqtt.golang"
|
||||
)
|
||||
|
||||
// MQTTClient wraps the Eclipse Paho MQTT client for printer telemetry.
|
||||
type MQTTClient struct {
|
||||
client mqtt.Client
|
||||
}
|
||||
|
||||
// MQTTConfig holds per-printer MQTT connection settings.
|
||||
type MQTTConfig struct {
|
||||
BrokerHost string
|
||||
BrokerPort int
|
||||
TopicPrefix string
|
||||
TLSEnabled bool
|
||||
ClientID string
|
||||
}
|
||||
|
||||
// BambuPrintStatus is the known Bambu Lab print-status payload shape.
|
||||
type BambuPrintStatus struct {
|
||||
Print struct {
|
||||
GcodeFile string `json:"gcode_file,omitempty"`
|
||||
Stage int `json:"stage,omitempty"`
|
||||
SubTaskName string `json:"subtask_name,omitempty"`
|
||||
PrintType string `json:"print_type,omitempty"`
|
||||
FilamentUsedMm float64 `json:"mc_percent,omitempty"` // placeholder; real telemetry varies
|
||||
} `json:"print,omitempty"`
|
||||
}
|
||||
|
||||
// NewMQTTClient creates an MQTT client connected to the given broker.
|
||||
func NewMQTTClient(cfg MQTTConfig) (*MQTTClient, error) {
|
||||
if cfg.BrokerPort == 0 {
|
||||
if cfg.TLSEnabled {
|
||||
cfg.BrokerPort = 8883
|
||||
} else {
|
||||
cfg.BrokerPort = 1883
|
||||
}
|
||||
}
|
||||
if cfg.ClientID == "" {
|
||||
cfg.ClientID = fmt.Sprintf("extrudex-%d", time.Now().Unix())
|
||||
}
|
||||
|
||||
opts := mqtt.NewClientOptions().
|
||||
AddBroker(fmt.Sprintf("tcp://%s:%d", cfg.BrokerHost, cfg.BrokerPort)).
|
||||
SetClientID(cfg.ClientID).
|
||||
SetAutoReconnect(true).
|
||||
SetConnectTimeout(10 * time.Second).
|
||||
SetOrderMatters(false)
|
||||
|
||||
if cfg.TLSEnabled {
|
||||
opts = opts.SetTLSConfig(&tls.Config{InsecureSkipVerify: false})
|
||||
}
|
||||
|
||||
client := mqtt.NewClient(opts)
|
||||
token := client.Connect()
|
||||
if token.Wait() && token.Error() != nil {
|
||||
return nil, fmt.Errorf("mqtt connect failed: %w", token.Error())
|
||||
}
|
||||
|
||||
slog.Info("mqtt client connected", "broker", cfg.BrokerHost, "port", cfg.BrokerPort, "tls", cfg.TLSEnabled)
|
||||
return &MQTTClient{client: client}, nil
|
||||
}
|
||||
|
||||
// Subscribe registers a callback for messages matching topic.
|
||||
func (c *MQTTClient) Subscribe(topic string, qos byte, callback func([]byte)) error {
|
||||
token := c.client.Subscribe(topic, qos, func(_ mqtt.Client, msg mqtt.Message) {
|
||||
callback(msg.Payload())
|
||||
})
|
||||
if token.Wait() && token.Error() != nil {
|
||||
return fmt.Errorf("mqtt subscribe failed: %w", token.Error())
|
||||
}
|
||||
slog.Info("mqtt subscribed", "topic", topic, "qos", qos)
|
||||
return nil
|
||||
}
|
||||
|
||||
// Unsubscribe removes a subscription.
|
||||
func (c *MQTTClient) Unsubscribe(topics ...string) error {
|
||||
token := c.client.Unsubscribe(topics...)
|
||||
if token.Wait() && token.Error() != nil {
|
||||
return fmt.Errorf("mqtt unsubscribe failed: %w", token.Error())
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// Disconnect cleanly disconnects the MQTT client.
|
||||
func (c *MQTTClient) Disconnect(quiesceMs uint) {
|
||||
c.client.Disconnect(quiesceMs)
|
||||
}
|
||||
|
||||
// IsConnected returns whether the underlying client is connected.
|
||||
func (c *MQTTClient) IsConnected() bool {
|
||||
return c.client.IsConnected()
|
||||
}
|
||||
|
||||
// ParseBambuTelemetry attempts to parse a Bambu Lab telemetry JSON payload.
|
||||
func ParseBambuTelemetry(payload []byte) (*BambuPrintStatus, error) {
|
||||
var msg BambuPrintStatus
|
||||
if err := json.Unmarshal(payload, &msg); err != nil {
|
||||
return nil, fmt.Errorf("parse bambu telemetry failed: %w", err)
|
||||
}
|
||||
return &msg, nil
|
||||
}
|
||||
|
||||
// DefaultBambuTopics returns the default topic patterns for Bambu Lab printers.
|
||||
func DefaultBambuTopics(topicPrefix string) []string {
|
||||
return []string{
|
||||
topicPrefix + "/report",
|
||||
}
|
||||
}
|
||||
24
backend/internal/config/config.go
Normal file
24
backend/internal/config/config.go
Normal file
@@ -0,0 +1,24 @@
|
||||
package config
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
|
||||
"github.com/kelseyhightower/envconfig"
|
||||
)
|
||||
|
||||
// Config holds all application configuration loaded from environment variables.
|
||||
type Config struct {
|
||||
DatabaseURL string `envconfig:"database_url" required:"true"`
|
||||
Port string `envconfig:"port" default:"8080"`
|
||||
CorsOrigin string `envconfig:"cors_origin" default:"*"`
|
||||
LogLevel string `envconfig:"log_level" default:"info"`
|
||||
}
|
||||
|
||||
// Load reads configuration from environment variables and returns a populated Config.
|
||||
func Load() (*Config, error) {
|
||||
var cfg Config
|
||||
if err := envconfig.Process("", &cfg); err != nil {
|
||||
return nil, fmt.Errorf("failed to load config: %w", err)
|
||||
}
|
||||
return &cfg, nil
|
||||
}
|
||||
34
backend/internal/db/db.go
Normal file
34
backend/internal/db/db.go
Normal file
@@ -0,0 +1,34 @@
|
||||
package db
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"time"
|
||||
|
||||
"github.com/jackc/pgx/v5/pgxpool"
|
||||
)
|
||||
|
||||
// NewPool creates a new pgx connection pool and verifies connectivity with a ping.
|
||||
func NewPool(databaseURL string) (*pgxpool.Pool, error) {
|
||||
ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
|
||||
defer cancel()
|
||||
|
||||
pool, err := pgxpool.New(ctx, databaseURL)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to create db pool: %w", err)
|
||||
}
|
||||
|
||||
if err := pool.Ping(ctx); err != nil {
|
||||
pool.Close()
|
||||
return nil, fmt.Errorf("failed to ping db: %w", err)
|
||||
}
|
||||
|
||||
return pool, nil
|
||||
}
|
||||
|
||||
// ClosePool gracefully closes the connection pool.
|
||||
func ClosePool(pool *pgxpool.Pool) {
|
||||
if pool != nil {
|
||||
pool.Close()
|
||||
}
|
||||
}
|
||||
67
backend/internal/dtos/dtos.go
Normal file
67
backend/internal/dtos/dtos.go
Normal file
@@ -0,0 +1,67 @@
|
||||
// Package dtos defines request/response data transfer objects for the Extrudex API.
|
||||
// DTOs keep HTTP serialization concerns separate from domain models.
|
||||
package dtos
|
||||
|
||||
// ============================================================================
|
||||
// Common Response Wrappers
|
||||
// ============================================================================
|
||||
|
||||
// ListResponse wraps a paginated collection response.
|
||||
type ListResponse struct {
|
||||
Data any `json:"data"`
|
||||
Total int `json:"total"`
|
||||
Limit int `json:"limit"`
|
||||
Offset int `json:"offset"`
|
||||
}
|
||||
|
||||
// SingleResponse wraps a single-item response.
|
||||
type SingleResponse struct {
|
||||
Data any `json:"data"`
|
||||
}
|
||||
|
||||
// ErrorResponse is the standard error payload for all API errors.
|
||||
type ErrorResponse struct {
|
||||
Error string `json:"error"`
|
||||
Code int `json:"code"`
|
||||
}
|
||||
|
||||
// ============================================================================
|
||||
// Filament DTOs
|
||||
// ============================================================================
|
||||
|
||||
// CreateFilamentRequest is the POST body for creating a new filament spool.
|
||||
type CreateFilamentRequest struct {
|
||||
Name string `json:"name"`
|
||||
MaterialBaseID int `json:"material_base_id"`
|
||||
MaterialFinishID int `json:"material_finish_id"`
|
||||
MaterialModifierID *int `json:"material_modifier_id,omitempty"`
|
||||
ColorHex string `json:"color_hex"`
|
||||
Brand *string `json:"brand,omitempty"`
|
||||
DiameterMM *float64 `json:"diameter_mm,omitempty"` // defaults to 1.75
|
||||
InitialGrams int `json:"initial_grams"`
|
||||
RemainingGrams int `json:"remaining_grams"`
|
||||
SpoolWeightGrams *int `json:"spool_weight_grams,omitempty"`
|
||||
CostUSD *float64 `json:"cost_usd,omitempty"`
|
||||
LowStockThresholdGrams *int `json:"low_stock_threshold_grams,omitempty"` // defaults to 50
|
||||
Notes *string `json:"notes,omitempty"`
|
||||
Barcode *string `json:"barcode,omitempty"`
|
||||
}
|
||||
|
||||
// UpdateFilamentRequest is the PUT body for partially updating a filament spool.
|
||||
// All fields are optional — only non-nil fields are applied.
|
||||
type UpdateFilamentRequest struct {
|
||||
Name *string `json:"name,omitempty"`
|
||||
MaterialBaseID *int `json:"material_base_id,omitempty"`
|
||||
MaterialFinishID *int `json:"material_finish_id,omitempty"`
|
||||
MaterialModifierID *int `json:"material_modifier_id,omitempty"`
|
||||
ColorHex *string `json:"color_hex,omitempty"`
|
||||
Brand *string `json:"brand,omitempty"`
|
||||
DiameterMM *float64 `json:"diameter_mm,omitempty"`
|
||||
InitialGrams *int `json:"initial_grams,omitempty"`
|
||||
RemainingGrams *int `json:"remaining_grams,omitempty"`
|
||||
SpoolWeightGrams *int `json:"spool_weight_grams,omitempty"`
|
||||
CostUSD *float64 `json:"cost_usd,omitempty"`
|
||||
LowStockThresholdGrams *int `json:"low_stock_threshold_grams,omitempty"`
|
||||
Notes *string `json:"notes,omitempty"`
|
||||
Barcode *string `json:"barcode,omitempty"`
|
||||
}
|
||||
273
backend/internal/handlers/filament_handler.go
Normal file
273
backend/internal/handlers/filament_handler.go
Normal file
@@ -0,0 +1,273 @@
|
||||
package handlers
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"log/slog"
|
||||
"net/http"
|
||||
"strconv"
|
||||
|
||||
"github.com/CubeCraft-Creations/Extrudex/backend/internal/dtos"
|
||||
"github.com/CubeCraft-Creations/Extrudex/backend/internal/models"
|
||||
"github.com/CubeCraft-Creations/Extrudex/backend/internal/repositories"
|
||||
"github.com/CubeCraft-Creations/Extrudex/backend/internal/services"
|
||||
"github.com/go-chi/chi/v5"
|
||||
)
|
||||
|
||||
// FilamentHandler handles HTTP requests for filament spool CRUD operations.
|
||||
type FilamentHandler struct {
|
||||
service *services.FilamentService
|
||||
}
|
||||
|
||||
// NewFilamentHandler creates a FilamentHandler with the given service.
|
||||
func NewFilamentHandler(service *services.FilamentService) *FilamentHandler {
|
||||
return &FilamentHandler{service: service}
|
||||
}
|
||||
|
||||
// List handles GET /api/filaments — returns paginated, filtered spools.
|
||||
func (h *FilamentHandler) List(w http.ResponseWriter, r *http.Request) {
|
||||
limit, offset := parsePagination(r)
|
||||
filter := repositories.FilamentFilter{
|
||||
Material: r.URL.Query().Get("material"),
|
||||
Finish: r.URL.Query().Get("finish"),
|
||||
Color: r.URL.Query().Get("color"),
|
||||
LowStock: r.URL.Query().Get("low_stock") == "true",
|
||||
Limit: limit,
|
||||
Offset: offset,
|
||||
}
|
||||
|
||||
spools, total, err := h.service.List(r.Context(), filter)
|
||||
if err != nil {
|
||||
slog.Error("failed to list filaments", "error", err)
|
||||
writeJSON(w, http.StatusInternalServerError, dtos.ErrorResponse{
|
||||
Error: "internal server error",
|
||||
Code: http.StatusInternalServerError,
|
||||
})
|
||||
return
|
||||
}
|
||||
|
||||
writeJSON(w, http.StatusOK, dtos.ListResponse{
|
||||
Data: spools,
|
||||
Total: total,
|
||||
Limit: limit,
|
||||
Offset: offset,
|
||||
})
|
||||
}
|
||||
|
||||
// Get handles GET /api/filaments/{id} — returns a single spool.
|
||||
func (h *FilamentHandler) Get(w http.ResponseWriter, r *http.Request) {
|
||||
id, err := strconv.Atoi(chi.URLParam(r, "id"))
|
||||
if err != nil {
|
||||
writeJSON(w, http.StatusBadRequest, dtos.ErrorResponse{
|
||||
Error: "invalid filament ID",
|
||||
Code: http.StatusBadRequest,
|
||||
})
|
||||
return
|
||||
}
|
||||
|
||||
spool, err := h.service.GetByID(r.Context(), id)
|
||||
if err != nil {
|
||||
slog.Error("failed to get filament", "id", id, "error", err)
|
||||
writeJSON(w, http.StatusInternalServerError, dtos.ErrorResponse{
|
||||
Error: "internal server error",
|
||||
Code: http.StatusInternalServerError,
|
||||
})
|
||||
return
|
||||
}
|
||||
if spool == nil {
|
||||
writeJSON(w, http.StatusNotFound, dtos.ErrorResponse{
|
||||
Error: "filament not found",
|
||||
Code: http.StatusNotFound,
|
||||
})
|
||||
return
|
||||
}
|
||||
|
||||
writeJSON(w, http.StatusOK, dtos.SingleResponse{Data: spool})
|
||||
}
|
||||
|
||||
// Create handles POST /api/filaments — creates a new filament spool.
|
||||
func (h *FilamentHandler) Create(w http.ResponseWriter, r *http.Request) {
|
||||
var req dtos.CreateFilamentRequest
|
||||
if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
|
||||
writeJSON(w, http.StatusBadRequest, dtos.ErrorResponse{
|
||||
Error: "invalid request body",
|
||||
Code: http.StatusBadRequest,
|
||||
})
|
||||
return
|
||||
}
|
||||
|
||||
// Validate required fields.
|
||||
if err := services.ValidateCreateFilamentRequest(req); err != nil {
|
||||
writeJSON(w, http.StatusBadRequest, dtos.ErrorResponse{
|
||||
Error: "validation failed: " + err.Error(),
|
||||
Code: http.StatusBadRequest,
|
||||
})
|
||||
return
|
||||
}
|
||||
|
||||
// Build domain model.
|
||||
spool := models.FilamentSpool{
|
||||
Name: req.Name,
|
||||
MaterialBaseID: req.MaterialBaseID,
|
||||
MaterialFinishID: req.MaterialFinishID,
|
||||
MaterialModifierID: req.MaterialModifierID,
|
||||
ColorHex: req.ColorHex,
|
||||
Brand: req.Brand,
|
||||
DiameterMM: 1.75, // default
|
||||
InitialGrams: req.InitialGrams,
|
||||
RemainingGrams: req.RemainingGrams,
|
||||
SpoolWeightGrams: req.SpoolWeightGrams,
|
||||
CostUSD: req.CostUSD,
|
||||
LowStockThresholdGrams: 50, // default
|
||||
Notes: req.Notes,
|
||||
Barcode: req.Barcode,
|
||||
}
|
||||
if req.DiameterMM != nil {
|
||||
spool.DiameterMM = *req.DiameterMM
|
||||
}
|
||||
if req.LowStockThresholdGrams != nil {
|
||||
spool.LowStockThresholdGrams = *req.LowStockThresholdGrams
|
||||
}
|
||||
|
||||
created, err := h.service.Create(r.Context(), &spool)
|
||||
if err != nil {
|
||||
slog.Error("failed to create filament", "error", err)
|
||||
writeJSON(w, http.StatusInternalServerError, dtos.ErrorResponse{
|
||||
Error: "internal server error",
|
||||
Code: http.StatusInternalServerError,
|
||||
})
|
||||
return
|
||||
}
|
||||
|
||||
writeJSON(w, http.StatusCreated, dtos.SingleResponse{Data: created})
|
||||
}
|
||||
|
||||
// Update handles PUT /api/filaments/{id} — partially updates a spool.
|
||||
func (h *FilamentHandler) Update(w http.ResponseWriter, r *http.Request) {
|
||||
id, err := strconv.Atoi(chi.URLParam(r, "id"))
|
||||
if err != nil {
|
||||
writeJSON(w, http.StatusBadRequest, dtos.ErrorResponse{
|
||||
Error: "invalid filament ID",
|
||||
Code: http.StatusBadRequest,
|
||||
})
|
||||
return
|
||||
}
|
||||
|
||||
var req dtos.UpdateFilamentRequest
|
||||
if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
|
||||
writeJSON(w, http.StatusBadRequest, dtos.ErrorResponse{
|
||||
Error: "invalid request body",
|
||||
Code: http.StatusBadRequest,
|
||||
})
|
||||
return
|
||||
}
|
||||
|
||||
// Validate update fields.
|
||||
if err := services.ValidateUpdateFilamentRequest(req); err != nil {
|
||||
writeJSON(w, http.StatusBadRequest, dtos.ErrorResponse{
|
||||
Error: "validation failed: " + err.Error(),
|
||||
Code: http.StatusBadRequest,
|
||||
})
|
||||
return
|
||||
}
|
||||
|
||||
// Build updates map (only non-nil fields).
|
||||
updates := buildFilamentUpdates(req)
|
||||
|
||||
updated, err := h.service.Update(r.Context(), id, updates)
|
||||
if err != nil {
|
||||
slog.Error("failed to update filament", "id", id, "error", err)
|
||||
writeJSON(w, http.StatusInternalServerError, dtos.ErrorResponse{
|
||||
Error: "internal server error",
|
||||
Code: http.StatusInternalServerError,
|
||||
})
|
||||
return
|
||||
}
|
||||
if updated == nil {
|
||||
writeJSON(w, http.StatusNotFound, dtos.ErrorResponse{
|
||||
Error: "filament not found",
|
||||
Code: http.StatusNotFound,
|
||||
})
|
||||
return
|
||||
}
|
||||
|
||||
writeJSON(w, http.StatusOK, dtos.SingleResponse{Data: updated})
|
||||
}
|
||||
|
||||
// Delete handles DELETE /api/filaments/{id} — soft-deletes a spool.
|
||||
func (h *FilamentHandler) Delete(w http.ResponseWriter, r *http.Request) {
|
||||
id, err := strconv.Atoi(chi.URLParam(r, "id"))
|
||||
if err != nil {
|
||||
writeJSON(w, http.StatusBadRequest, dtos.ErrorResponse{
|
||||
Error: "invalid filament ID",
|
||||
Code: http.StatusBadRequest,
|
||||
})
|
||||
return
|
||||
}
|
||||
|
||||
deleted, err := h.service.SoftDelete(r.Context(), id)
|
||||
if err != nil {
|
||||
slog.Error("failed to delete filament", "id", id, "error", err)
|
||||
writeJSON(w, http.StatusInternalServerError, dtos.ErrorResponse{
|
||||
Error: "internal server error",
|
||||
Code: http.StatusInternalServerError,
|
||||
})
|
||||
return
|
||||
}
|
||||
if !deleted {
|
||||
writeJSON(w, http.StatusNotFound, dtos.ErrorResponse{
|
||||
Error: "filament not found",
|
||||
Code: http.StatusNotFound,
|
||||
})
|
||||
return
|
||||
}
|
||||
|
||||
w.WriteHeader(http.StatusNoContent)
|
||||
}
|
||||
|
||||
// buildFilamentUpdates converts an UpdateFilamentRequest to a map of column→value.
|
||||
func buildFilamentUpdates(req dtos.UpdateFilamentRequest) map[string]interface{} {
|
||||
updates := make(map[string]interface{})
|
||||
if req.Name != nil {
|
||||
updates["name"] = *req.Name
|
||||
}
|
||||
if req.MaterialBaseID != nil {
|
||||
updates["material_base_id"] = *req.MaterialBaseID
|
||||
}
|
||||
if req.MaterialFinishID != nil {
|
||||
updates["material_finish_id"] = *req.MaterialFinishID
|
||||
}
|
||||
if req.MaterialModifierID != nil {
|
||||
updates["material_modifier_id"] = *req.MaterialModifierID
|
||||
}
|
||||
if req.ColorHex != nil {
|
||||
updates["color_hex"] = *req.ColorHex
|
||||
}
|
||||
if req.Brand != nil {
|
||||
updates["brand"] = *req.Brand
|
||||
}
|
||||
if req.DiameterMM != nil {
|
||||
updates["diameter_mm"] = *req.DiameterMM
|
||||
}
|
||||
if req.InitialGrams != nil {
|
||||
updates["initial_grams"] = *req.InitialGrams
|
||||
}
|
||||
if req.RemainingGrams != nil {
|
||||
updates["remaining_grams"] = *req.RemainingGrams
|
||||
}
|
||||
if req.SpoolWeightGrams != nil {
|
||||
updates["spool_weight_grams"] = *req.SpoolWeightGrams
|
||||
}
|
||||
if req.CostUSD != nil {
|
||||
updates["cost_usd"] = *req.CostUSD
|
||||
}
|
||||
if req.LowStockThresholdGrams != nil {
|
||||
updates["low_stock_threshold_grams"] = *req.LowStockThresholdGrams
|
||||
}
|
||||
if req.Notes != nil {
|
||||
updates["notes"] = *req.Notes
|
||||
}
|
||||
if req.Barcode != nil {
|
||||
updates["barcode"] = *req.Barcode
|
||||
}
|
||||
return updates
|
||||
}
|
||||
50
backend/internal/handlers/health.go
Normal file
50
backend/internal/handlers/health.go
Normal file
@@ -0,0 +1,50 @@
|
||||
package handlers
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
"log/slog"
|
||||
"net/http"
|
||||
"time"
|
||||
|
||||
"github.com/jackc/pgx/v5/pgxpool"
|
||||
)
|
||||
|
||||
// HealthHandler provides a health check endpoint that verifies database connectivity.
|
||||
type HealthHandler struct {
|
||||
dbPool *pgxpool.Pool
|
||||
}
|
||||
|
||||
// NewHealthHandler creates a new HealthHandler with the given database pool.
|
||||
func NewHealthHandler(dbPool *pgxpool.Pool) *HealthHandler {
|
||||
return &HealthHandler{dbPool: dbPool}
|
||||
}
|
||||
|
||||
// ServeHTTP handles GET /health requests.
|
||||
func (h *HealthHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) {
|
||||
ctx, cancel := context.WithTimeout(r.Context(), 5*time.Second)
|
||||
defer cancel()
|
||||
|
||||
dbConnected := false
|
||||
if h.dbPool != nil {
|
||||
if err := h.dbPool.Ping(ctx); err == nil {
|
||||
dbConnected = true
|
||||
} else {
|
||||
slog.Warn("health check db ping failed", "error", err)
|
||||
}
|
||||
}
|
||||
|
||||
resp := map[string]any{
|
||||
"status": "ok",
|
||||
"timestamp": time.Now().UTC().Format(time.RFC3339),
|
||||
"db_connected": dbConnected,
|
||||
}
|
||||
|
||||
w.Header().Set("Content-Type", "application/json")
|
||||
if !dbConnected {
|
||||
w.WriteHeader(http.StatusServiceUnavailable)
|
||||
}
|
||||
if err := json.NewEncoder(w).Encode(resp); err != nil {
|
||||
slog.Error("failed to encode health response", "error", err)
|
||||
}
|
||||
}
|
||||
51
backend/internal/handlers/helpers.go
Normal file
51
backend/internal/handlers/helpers.go
Normal file
@@ -0,0 +1,51 @@
|
||||
package handlers
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"log/slog"
|
||||
"net/http"
|
||||
"strconv"
|
||||
|
||||
"github.com/CubeCraft-Creations/Extrudex/backend/internal/dtos"
|
||||
"github.com/CubeCraft-Creations/Extrudex/backend/internal/services"
|
||||
)
|
||||
|
||||
// writeJSON serializes v as JSON to the response writer with the given status code.
|
||||
// Logs an error if encoding fails.
|
||||
func writeJSON(w http.ResponseWriter, status int, v interface{}) {
|
||||
w.Header().Set("Content-Type", "application/json")
|
||||
w.WriteHeader(status)
|
||||
if err := json.NewEncoder(w).Encode(v); err != nil {
|
||||
slog.Error("failed to encode JSON response", "error", err)
|
||||
}
|
||||
}
|
||||
|
||||
// parsePagination reads limit and offset query parameters with defaults of 20 and 0.
|
||||
func parsePagination(r *http.Request) (limit, offset int) {
|
||||
limit = 20
|
||||
offset = 0
|
||||
|
||||
if l := r.URL.Query().Get("limit"); l != "" {
|
||||
if parsed, err := strconv.Atoi(l); err == nil && parsed > 0 {
|
||||
limit = parsed
|
||||
}
|
||||
}
|
||||
if o := r.URL.Query().Get("offset"); o != "" {
|
||||
if parsed, err := strconv.Atoi(o); err == nil && parsed >= 0 {
|
||||
offset = parsed
|
||||
}
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
// ValidateCreateFilamentRequest validates a CreateFilamentRequest DTO.
// Thin pass-through to the service-layer validator so handler-package callers
// don't need to import services directly; it adds no checks of its own.
func ValidateCreateFilamentRequest(req dtos.CreateFilamentRequest) error {
	return services.ValidateCreateFilamentRequest(req)
}

// ValidateUpdateFilamentRequest validates an UpdateFilamentRequest DTO.
// Thin pass-through to the service-layer validator; see
// ValidateCreateFilamentRequest for rationale.
func ValidateUpdateFilamentRequest(req dtos.UpdateFilamentRequest) error {
	return services.ValidateUpdateFilamentRequest(req)
}
|
||||
34
backend/internal/handlers/material_handler.go
Normal file
34
backend/internal/handlers/material_handler.go
Normal file
@@ -0,0 +1,34 @@
|
||||
package handlers
|
||||
|
||||
import (
|
||||
"log/slog"
|
||||
"net/http"
|
||||
|
||||
"github.com/CubeCraft-Creations/Extrudex/backend/internal/dtos"
|
||||
"github.com/CubeCraft-Creations/Extrudex/backend/internal/repositories"
|
||||
)
|
||||
|
||||
// MaterialHandler handles requests for material lookup data.
|
||||
type MaterialHandler struct {
|
||||
repo *repositories.MaterialRepository
|
||||
}
|
||||
|
||||
// NewMaterialHandler creates a MaterialHandler with the given repository.
|
||||
func NewMaterialHandler(repo *repositories.MaterialRepository) *MaterialHandler {
|
||||
return &MaterialHandler{repo: repo}
|
||||
}
|
||||
|
||||
// List handles GET /api/materials — returns all material bases.
|
||||
func (h *MaterialHandler) List(w http.ResponseWriter, r *http.Request) {
|
||||
materials, err := h.repo.GetAll(r.Context())
|
||||
if err != nil {
|
||||
slog.Error("failed to list materials", "error", err)
|
||||
writeJSON(w, http.StatusInternalServerError, dtos.ErrorResponse{
|
||||
Error: "internal server error",
|
||||
Code: http.StatusInternalServerError,
|
||||
})
|
||||
return
|
||||
}
|
||||
|
||||
writeJSON(w, http.StatusOK, dtos.SingleResponse{Data: materials})
|
||||
}
|
||||
60
backend/internal/handlers/print_job_handler.go
Normal file
60
backend/internal/handlers/print_job_handler.go
Normal file
@@ -0,0 +1,60 @@
|
||||
package handlers
|
||||
|
||||
import (
|
||||
"log/slog"
|
||||
"net/http"
|
||||
"strconv"
|
||||
|
||||
"github.com/CubeCraft-Creations/Extrudex/backend/internal/dtos"
|
||||
"github.com/CubeCraft-Creations/Extrudex/backend/internal/repositories"
|
||||
"github.com/CubeCraft-Creations/Extrudex/backend/internal/services"
|
||||
)
|
||||
|
||||
// PrintJobHandler handles HTTP requests for print job operations.
|
||||
type PrintJobHandler struct {
|
||||
service *services.PrintJobService
|
||||
}
|
||||
|
||||
// NewPrintJobHandler creates a PrintJobHandler with the given service.
|
||||
func NewPrintJobHandler(service *services.PrintJobService) *PrintJobHandler {
|
||||
return &PrintJobHandler{service: service}
|
||||
}
|
||||
|
||||
// List handles GET /api/print-jobs — returns paginated, filtered print jobs.
|
||||
func (h *PrintJobHandler) List(w http.ResponseWriter, r *http.Request) {
|
||||
limit, offset := parsePagination(r)
|
||||
filter := repositories.PrintJobFilter{
|
||||
Status: r.URL.Query().Get("status"),
|
||||
Limit: limit,
|
||||
Offset: offset,
|
||||
}
|
||||
|
||||
if pidStr := r.URL.Query().Get("printer_id"); pidStr != "" {
|
||||
pid, err := strconv.Atoi(pidStr)
|
||||
if err != nil {
|
||||
writeJSON(w, http.StatusBadRequest, dtos.ErrorResponse{
|
||||
Error: "invalid printer_id",
|
||||
Code: http.StatusBadRequest,
|
||||
})
|
||||
return
|
||||
}
|
||||
filter.PrinterID = &pid
|
||||
}
|
||||
|
||||
jobs, total, err := h.service.List(r.Context(), filter)
|
||||
if err != nil {
|
||||
slog.Error("failed to list print jobs", "error", err)
|
||||
writeJSON(w, http.StatusInternalServerError, dtos.ErrorResponse{
|
||||
Error: "internal server error",
|
||||
Code: http.StatusInternalServerError,
|
||||
})
|
||||
return
|
||||
}
|
||||
|
||||
writeJSON(w, http.StatusOK, dtos.ListResponse{
|
||||
Data: jobs,
|
||||
Total: total,
|
||||
Limit: limit,
|
||||
Offset: offset,
|
||||
})
|
||||
}
|
||||
34
backend/internal/handlers/printer_handler.go
Normal file
34
backend/internal/handlers/printer_handler.go
Normal file
@@ -0,0 +1,34 @@
|
||||
package handlers
|
||||
|
||||
import (
|
||||
"log/slog"
|
||||
"net/http"
|
||||
|
||||
"github.com/CubeCraft-Creations/Extrudex/backend/internal/dtos"
|
||||
"github.com/CubeCraft-Creations/Extrudex/backend/internal/services"
|
||||
)
|
||||
|
||||
// PrinterHandler handles HTTP requests for printer listings.
|
||||
type PrinterHandler struct {
|
||||
service *services.PrinterService
|
||||
}
|
||||
|
||||
// NewPrinterHandler creates a PrinterHandler with the given service.
|
||||
func NewPrinterHandler(service *services.PrinterService) *PrinterHandler {
|
||||
return &PrinterHandler{service: service}
|
||||
}
|
||||
|
||||
// List handles GET /api/printers — returns all printers with printer_type info.
|
||||
func (h *PrinterHandler) List(w http.ResponseWriter, r *http.Request) {
|
||||
printers, err := h.service.List(r.Context())
|
||||
if err != nil {
|
||||
slog.Error("failed to list printers", "error", err)
|
||||
writeJSON(w, http.StatusInternalServerError, dtos.ErrorResponse{
|
||||
Error: "internal server error",
|
||||
Code: http.StatusInternalServerError,
|
||||
})
|
||||
return
|
||||
}
|
||||
|
||||
writeJSON(w, http.StatusOK, dtos.SingleResponse{Data: printers})
|
||||
}
|
||||
70
backend/internal/handlers/usage_log_handler.go
Normal file
70
backend/internal/handlers/usage_log_handler.go
Normal file
@@ -0,0 +1,70 @@
|
||||
package handlers
|
||||
|
||||
import (
|
||||
"log/slog"
|
||||
"net/http"
|
||||
"strconv"
|
||||
|
||||
"github.com/CubeCraft-Creations/Extrudex/backend/internal/dtos"
|
||||
"github.com/CubeCraft-Creations/Extrudex/backend/internal/repositories"
|
||||
)
|
||||
|
||||
// UsageLogHandler handles HTTP requests for usage log operations.
|
||||
type UsageLogHandler struct {
|
||||
repo *repositories.UsageLogRepository
|
||||
}
|
||||
|
||||
// NewUsageLogHandler creates a UsageLogHandler with the given repository.
|
||||
func NewUsageLogHandler(repo *repositories.UsageLogRepository) *UsageLogHandler {
|
||||
return &UsageLogHandler{repo: repo}
|
||||
}
|
||||
|
||||
// List handles GET /api/usage-logs — returns paginated, filtered usage logs.
|
||||
func (h *UsageLogHandler) List(w http.ResponseWriter, r *http.Request) {
|
||||
limit, offset := parsePagination(r)
|
||||
filter := repositories.UsageLogFilter{
|
||||
Limit: limit,
|
||||
Offset: offset,
|
||||
}
|
||||
|
||||
if sidStr := r.URL.Query().Get("spool_id"); sidStr != "" {
|
||||
sid, err := strconv.Atoi(sidStr)
|
||||
if err != nil {
|
||||
writeJSON(w, http.StatusBadRequest, dtos.ErrorResponse{
|
||||
Error: "invalid spool_id",
|
||||
Code: http.StatusBadRequest,
|
||||
})
|
||||
return
|
||||
}
|
||||
filter.SpoolID = &sid
|
||||
}
|
||||
|
||||
if jidStr := r.URL.Query().Get("job_id"); jidStr != "" {
|
||||
jid, err := strconv.Atoi(jidStr)
|
||||
if err != nil {
|
||||
writeJSON(w, http.StatusBadRequest, dtos.ErrorResponse{
|
||||
Error: "invalid job_id",
|
||||
Code: http.StatusBadRequest,
|
||||
})
|
||||
return
|
||||
}
|
||||
filter.JobID = &jid
|
||||
}
|
||||
|
||||
logs, total, err := h.repo.GetAll(r.Context(), filter)
|
||||
if err != nil {
|
||||
slog.Error("failed to list usage logs", "error", err)
|
||||
writeJSON(w, http.StatusInternalServerError, dtos.ErrorResponse{
|
||||
Error: "internal server error",
|
||||
Code: http.StatusInternalServerError,
|
||||
})
|
||||
return
|
||||
}
|
||||
|
||||
writeJSON(w, http.StatusOK, dtos.ListResponse{
|
||||
Data: logs,
|
||||
Total: total,
|
||||
Limit: limit,
|
||||
Offset: offset,
|
||||
})
|
||||
}
|
||||
162
backend/internal/models/models.go
Normal file
162
backend/internal/models/models.go
Normal file
@@ -0,0 +1,162 @@
|
||||
// Package models defines the Extrudex domain model structs.
|
||||
// These map 1:1 to PostgreSQL tables with snake_case JSON serialization.
|
||||
// Nullable fields use pointer types; all timestamps are time.Time.
|
||||
package models
|
||||
|
||||
import "time"
|
||||
|
||||
// ============================================================================
|
||||
// Lookup Tables
|
||||
// ============================================================================
|
||||
|
||||
// PrinterType represents a printer technology category (fdm, resin, etc.).
type PrinterType struct {
	ID        int       `json:"id"`
	Name      string    `json:"name"`
	CreatedAt time.Time `json:"created_at"`
	UpdatedAt time.Time `json:"updated_at"`
}

// JobStatus represents a print job lifecycle state.
type JobStatus struct {
	ID        int       `json:"id"`
	Name      string    `json:"name"`
	CreatedAt time.Time `json:"created_at"`
	UpdatedAt time.Time `json:"updated_at"`
}

// MaterialBase represents a base material type (PLA, PETG, ABS, etc.).
// Density and temperature ranges are stored here for grams-calculation and slicing guidance.
// Temperature fields are nil when the range is unknown for a material.
type MaterialBase struct {
	ID               int       `json:"id"`
	Name             string    `json:"name"`
	DensityGCm3      float64   `json:"density_g_cm3"` // grams per cubic centimetre
	ExtrusionTempMin *int      `json:"extrusion_temp_min,omitempty"`
	ExtrusionTempMax *int      `json:"extrusion_temp_max,omitempty"`
	BedTempMin       *int      `json:"bed_temp_min,omitempty"`
	BedTempMax       *int      `json:"bed_temp_max,omitempty"`
	CreatedAt        time.Time `json:"created_at"`
	UpdatedAt        time.Time `json:"updated_at"`
}

// MaterialFinish represents the visual/texture finish (Basic, Silk, Matte, etc.).
type MaterialFinish struct {
	ID          int       `json:"id"`
	Name        string    `json:"name"`
	Description *string   `json:"description,omitempty"`
	CreatedAt   time.Time `json:"created_at"`
	UpdatedAt   time.Time `json:"updated_at"`
}

// MaterialModifier represents an additive property (Carbon Fiber, Wood-Filled, etc.).
type MaterialModifier struct {
	ID          int       `json:"id"`
	Name        string    `json:"name"`
	Description *string   `json:"description,omitempty"`
	CreatedAt   time.Time `json:"created_at"`
	UpdatedAt   time.Time `json:"updated_at"`
}
|
||||
|
||||
// ============================================================================
|
||||
// Core Entity Tables
|
||||
// ============================================================================
|
||||
|
||||
// Printer represents a 3D printer in the fleet.
// Connectivity is optional and dual-stack: Moonraker (HTTP) and/or MQTT
// fields may be nil when the printer is not network-integrated.
type Printer struct {
	ID              int          `json:"id"`
	Name            string       `json:"name"`
	PrinterTypeID   int          `json:"printer_type_id"`
	PrinterType     *PrinterType `json:"printer_type,omitempty"` // populated on JOIN queries
	Manufacturer    *string      `json:"manufacturer,omitempty"`
	Model           *string      `json:"model,omitempty"`
	MoonrakerURL    *string      `json:"moonraker_url,omitempty"`
	MoonrakerAPIKey *string      `json:"moonraker_api_key,omitempty"`
	MQTTBrokerHost  *string      `json:"mqtt_broker_host,omitempty"`
	MQTTTopicPrefix *string      `json:"mqtt_topic_prefix,omitempty"`
	MQTTTLSEnabled  bool         `json:"mqtt_tls_enabled"`
	IsActive        bool         `json:"is_active"`
	CreatedAt       time.Time    `json:"created_at"`
	UpdatedAt       time.Time    `json:"updated_at"`
}

// FilamentSpool represents a physical filament spool in inventory.
// material_finish_id defaults to 1 ("Basic"); material_modifier_id is optional.
// Grams are always physically measured values — grams_used is derived, not stored.
// DeletedAt non-nil marks a soft-deleted spool (excluded from normal queries).
type FilamentSpool struct {
	ID                 int               `json:"id"`
	Name               string            `json:"name"`
	MaterialBaseID     int               `json:"material_base_id"`
	MaterialBase       *MaterialBase     `json:"material_base,omitempty"` // JOIN
	MaterialFinishID   int               `json:"material_finish_id"`
	MaterialFinish     *MaterialFinish   `json:"material_finish,omitempty"` // JOIN
	MaterialModifierID *int              `json:"material_modifier_id,omitempty"`
	MaterialModifier   *MaterialModifier `json:"material_modifier,omitempty"` // JOIN
	ColorHex           string            `json:"color_hex"`
	Brand              *string           `json:"brand,omitempty"`
	DiameterMM         float64           `json:"diameter_mm"` // handler default: 1.75
	InitialGrams       int               `json:"initial_grams"`
	RemainingGrams     int               `json:"remaining_grams"`
	SpoolWeightGrams   *int              `json:"spool_weight_grams,omitempty"`
	CostUSD            *float64          `json:"cost_usd,omitempty"`
	LowStockThresholdGrams int           `json:"low_stock_threshold_grams"` // handler default: 50
	Notes              *string           `json:"notes,omitempty"`
	Barcode            *string           `json:"barcode,omitempty"`
	DeletedAt          *time.Time        `json:"deleted_at,omitempty"`
	CreatedAt          time.Time         `json:"created_at"`
	UpdatedAt          time.Time         `json:"updated_at"`
}

// PrintJob represents a single print on a specific printer.
// The filament_spool_id is a convenience reference; multi-spool jobs track usage in usage_logs.
// Timing/usage fields are nil until the corresponding lifecycle stage is reached.
type PrintJob struct {
	ID                       int            `json:"id"`
	PrinterID                int            `json:"printer_id"`
	Printer                  *Printer       `json:"printer,omitempty"` // JOIN
	FilamentSpoolID          *int           `json:"filament_spool_id,omitempty"`
	FilamentSpool            *FilamentSpool `json:"filament_spool,omitempty"` // JOIN
	JobName                  string         `json:"job_name"`
	FileName                 *string        `json:"file_name,omitempty"`
	JobStatusID              int            `json:"job_status_id"`
	JobStatus                *JobStatus     `json:"job_status,omitempty"` // JOIN
	StartedAt                *time.Time     `json:"started_at,omitempty"`
	CompletedAt              *time.Time     `json:"completed_at,omitempty"`
	DurationSeconds          *int           `json:"duration_seconds,omitempty"`
	EstimatedDurationSeconds *int           `json:"estimated_duration_seconds,omitempty"`
	TotalMMExtruded          *float64       `json:"total_mm_extruded,omitempty"`
	TotalGramsUsed           *float64       `json:"total_grams_used,omitempty"`
	TotalCostUSD             *float64       `json:"total_cost_usd,omitempty"`
	Notes                    *string        `json:"notes,omitempty"`
	DeletedAt                *time.Time     `json:"deleted_at,omitempty"`
	CreatedAt                time.Time      `json:"created_at"`
	UpdatedAt                time.Time      `json:"updated_at"`
}

// UsageLog records filament consumption for a specific spool during a print job.
// This is the atomic unit of filament tracking — grams are derived from mm_extruded.
type UsageLog struct {
	ID              int            `json:"id"`
	PrintJobID      int            `json:"print_job_id"`
	PrintJob        *PrintJob      `json:"print_job,omitempty"` // JOIN
	FilamentSpoolID int            `json:"filament_spool_id"`
	FilamentSpool   *FilamentSpool `json:"filament_spool,omitempty"` // JOIN
	MMExtruded      float64        `json:"mm_extruded"`
	GramsUsed       float64        `json:"grams_used"`
	CostUSD         *float64       `json:"cost_usd,omitempty"`
	LoggedAt        time.Time      `json:"logged_at"`
	CreatedAt       time.Time      `json:"created_at"`
}
|
||||
|
||||
// ============================================================================
|
||||
// Application Settings
|
||||
// ============================================================================
|
||||
|
||||
// Setting represents a key-value application configuration entry.
// The value is stored as JSONB in PostgreSQL, allowing flexible typed config.
type Setting struct {
	ID  int    `json:"id"`
	Key string `json:"key"`
	// Value holds raw JSON bytes — marshalling/unmarshalling into a concrete
	// type is the caller's responsibility.
	Value       []byte    `json:"value"`
	Description *string   `json:"description,omitempty"`
	CreatedAt   time.Time `json:"created_at"`
	UpdatedAt   time.Time `json:"updated_at"`
}
|
||||
285
backend/internal/repositories/filament_repository.go
Normal file
285
backend/internal/repositories/filament_repository.go
Normal file
@@ -0,0 +1,285 @@
|
||||
package repositories
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/CubeCraft-Creations/Extrudex/backend/internal/models"
|
||||
"github.com/jackc/pgx/v5/pgxpool"
|
||||
)
|
||||
|
||||
// FilamentRepository handles database queries for filament_spools.
|
||||
type FilamentRepository struct {
|
||||
pool *pgxpool.Pool
|
||||
}
|
||||
|
||||
// NewFilamentRepository creates a FilamentRepository backed by the given pool.
|
||||
func NewFilamentRepository(pool *pgxpool.Pool) *FilamentRepository {
|
||||
return &FilamentRepository{pool: pool}
|
||||
}
|
||||
|
||||
// FilamentFilter holds query parameters for listing filament spools.
// Zero values disable the corresponding filter: empty strings match all
// materials/finishes/colors and LowStock=false applies no stock condition.
type FilamentFilter struct {
	Material string // filter by material_base name (case-insensitive)
	Finish   string // filter by material_finish name (case-insensitive)
	Color    string // filter by exact color_hex match
	LowStock bool   // if true, filter for remaining_grams <= low_stock_threshold_grams
	Limit    int    // max rows to return
	Offset   int    // rows to skip (pagination)
}
|
||||
|
||||
// spoolScanFields is the common SELECT column list for filament spools with JOINs.
// Column order here MUST stay in sync with the Scan order in scanSpoolWithJoins —
// pgx binds positionally, not by alias.
// Fix: the original wrapped the nullable temperature columns in
// COALESCE(col, NULL::int), which is a no-op (falling back to NULL returns the
// column unchanged); the wrappers are dropped, yielding identical results.
const spoolScanFields = `
	s.id, s.name,
	s.material_base_id,
	COALESCE(mb.name, '') as material_base_name,
	COALESCE(mb.density_g_cm3, 0) as material_base_density_g_cm3,
	mb.extrusion_temp_min as material_base_extrusion_temp_min,
	mb.extrusion_temp_max as material_base_extrusion_temp_max,
	mb.bed_temp_min as material_base_bed_temp_min,
	mb.bed_temp_max as material_base_bed_temp_max,
	COALESCE(mb.created_at, s.created_at) as material_base_created_at,
	COALESCE(mb.updated_at, s.created_at) as material_base_updated_at,
	s.material_finish_id,
	COALESCE(mf.name, '') as material_finish_name,
	mf.description as material_finish_description,
	COALESCE(mf.created_at, s.created_at) as material_finish_created_at,
	COALESCE(mf.updated_at, s.created_at) as material_finish_updated_at,
	s.material_modifier_id,
	mm.name as material_modifier_name,
	mm.description as material_modifier_description,
	mm.created_at as material_modifier_created_at,
	mm.updated_at as material_modifier_updated_at,
	s.color_hex, s.brand, s.diameter_mm,
	s.initial_grams, s.remaining_grams, s.spool_weight_grams,
	s.cost_usd, s.low_stock_threshold_grams,
	s.notes, s.barcode,
	s.deleted_at, s.created_at, s.updated_at`

// spoolFromJoins is the FROM/JOIN clause paired with spoolScanFields.
// LEFT JOINs keep spools visible even when a lookup row is missing, which is
// why the non-nullable lookup columns above carry COALESCE defaults.
const spoolFromJoins = `
	FROM filament_spools s
	LEFT JOIN material_bases mb ON s.material_base_id = mb.id
	LEFT JOIN material_finishes mf ON s.material_finish_id = mf.id
	LEFT JOIN material_modifiers mm ON s.material_modifier_id = mm.id`
|
||||
|
||||
// scanSpoolWithJoins scans a full spool row including all JOINed tables.
// The Scan argument order below MUST match the column order in spoolScanFields
// exactly — binding is positional. The row parameter accepts anything with a
// pgx-style Scan method (pgx.Row or pgx.Rows), so single-row and multi-row
// queries can share this code.
func scanSpoolWithJoins(row interface{ Scan(...interface{}) error }) (models.FilamentSpool, error) {
	var s models.FilamentSpool
	var mb models.MaterialBase
	var mf models.MaterialFinish
	var mfDesc *string
	// Modifier columns come from a LEFT JOIN and may all be NULL, so each is
	// scanned through a pointer.
	var modifierID *int
	var modName, modDesc *string
	var modCreatedAt, modUpdatedAt *time.Time

	err := row.Scan(
		&s.ID, &s.Name,
		&s.MaterialBaseID,
		&mb.Name, &mb.DensityGCm3,
		&mb.ExtrusionTempMin, &mb.ExtrusionTempMax,
		&mb.BedTempMin, &mb.BedTempMax,
		&mb.CreatedAt, &mb.UpdatedAt,
		&s.MaterialFinishID,
		&mf.Name, &mfDesc,
		&mf.CreatedAt, &mf.UpdatedAt,
		&modifierID,
		&modName, &modDesc,
		&modCreatedAt, &modUpdatedAt,
		&s.ColorHex, &s.Brand, &s.DiameterMM,
		&s.InitialGrams, &s.RemainingGrams, &s.SpoolWeightGrams,
		&s.CostUSD, &s.LowStockThresholdGrams,
		&s.Notes, &s.Barcode,
		&s.DeletedAt, &s.CreatedAt, &s.UpdatedAt,
	)
	if err != nil {
		return s, err
	}

	// The SELECT doesn't carry the lookup tables' own IDs; copy them from the
	// spool's foreign keys.
	mb.ID = s.MaterialBaseID
	s.MaterialBase = &mb

	mf.ID = s.MaterialFinishID
	if mfDesc != nil {
		mf.Description = mfDesc
	}
	s.MaterialFinish = &mf

	// Only build the modifier struct when the LEFT JOIN actually matched.
	s.MaterialModifierID = modifierID
	if modifierID != nil && modName != nil {
		mm := models.MaterialModifier{
			ID:   *modifierID,
			Name: *modName,
		}
		if modDesc != nil {
			mm.Description = modDesc
		}
		if modCreatedAt != nil {
			mm.CreatedAt = *modCreatedAt
		}
		if modUpdatedAt != nil {
			mm.UpdatedAt = *modUpdatedAt
		}
		s.MaterialModifier = &mm
	}

	return s, nil
}
|
||||
|
||||
// GetAll returns filament spools matching the given filters, with pagination.
|
||||
// Returns results, total matching count, and any error.
|
||||
func (r *FilamentRepository) GetAll(ctx context.Context, filter FilamentFilter) ([]models.FilamentSpool, int, error) {
|
||||
conditions := []string{"s.deleted_at IS NULL"}
|
||||
args := []interface{}{}
|
||||
argIdx := 1
|
||||
|
||||
if filter.Material != "" {
|
||||
conditions = append(conditions, fmt.Sprintf("LOWER(mb.name) = LOWER($%d)", argIdx))
|
||||
args = append(args, filter.Material)
|
||||
argIdx++
|
||||
}
|
||||
if filter.Finish != "" {
|
||||
conditions = append(conditions, fmt.Sprintf("LOWER(mf.name) = LOWER($%d)", argIdx))
|
||||
args = append(args, filter.Finish)
|
||||
argIdx++
|
||||
}
|
||||
if filter.Color != "" {
|
||||
conditions = append(conditions, fmt.Sprintf("s.color_hex = $%d", argIdx))
|
||||
args = append(args, filter.Color)
|
||||
argIdx++
|
||||
}
|
||||
if filter.LowStock {
|
||||
conditions = append(conditions, "s.remaining_grams <= s.low_stock_threshold_grams")
|
||||
}
|
||||
|
||||
whereClause := ""
|
||||
if len(conditions) > 0 {
|
||||
whereClause = "WHERE " + strings.Join(conditions, " AND ")
|
||||
}
|
||||
|
||||
// Count total.
|
||||
var total int
|
||||
countQuery := "SELECT COUNT(*) " + spoolFromJoins + " " + whereClause
|
||||
if err := r.pool.QueryRow(ctx, countQuery, args...).Scan(&total); err != nil {
|
||||
return nil, 0, err
|
||||
}
|
||||
|
||||
// Query with pagination.
|
||||
dataQuery := "SELECT " + spoolScanFields + " " + spoolFromJoins + " " +
|
||||
whereClause +
|
||||
" ORDER BY s.name ASC" +
|
||||
fmt.Sprintf(" LIMIT $%d OFFSET $%d", argIdx, argIdx+1)
|
||||
|
||||
dataArgs := make([]interface{}, len(args))
|
||||
copy(dataArgs, args)
|
||||
dataArgs = append(dataArgs, filter.Limit, filter.Offset)
|
||||
|
||||
rows, err := r.pool.Query(ctx, dataQuery, dataArgs...)
|
||||
if err != nil {
|
||||
return nil, 0, err
|
||||
}
|
||||
defer rows.Close()
|
||||
|
||||
var spools []models.FilamentSpool
|
||||
for rows.Next() {
|
||||
s, err := scanSpoolWithJoins(rows)
|
||||
if err != nil {
|
||||
return nil, 0, err
|
||||
}
|
||||
spools = append(spools, s)
|
||||
}
|
||||
if err := rows.Err(); err != nil {
|
||||
return nil, 0, err
|
||||
}
|
||||
if spools == nil {
|
||||
spools = []models.FilamentSpool{}
|
||||
}
|
||||
|
||||
return spools, total, nil
|
||||
}
|
||||
|
||||
// GetByID returns a single filament spool by ID with JOINed data.
// NOTE(review): despite the earlier doc claiming "returns nil if not found",
// a missing or soft-deleted row does NOT yield (nil, nil) — the driver's
// no-rows error from scanSpoolWithJoins is propagated. Callers must treat
// that error (pgx.ErrNoRows) as "not found".
func (r *FilamentRepository) GetByID(ctx context.Context, id int) (*models.FilamentSpool, error) {
	// Reuse the shared SELECT/JOIN fragments so scan order stays in sync.
	query := "SELECT " + spoolScanFields + " " + spoolFromJoins +
		" WHERE s.id = $1 AND s.deleted_at IS NULL"

	row := r.pool.QueryRow(ctx, query, id)
	s, err := scanSpoolWithJoins(row)
	if err != nil {
		return nil, err
	}
	return &s, nil
}
|
||||
|
||||
// Create inserts a new filament spool and returns the created spool with JOINed data.
|
||||
func (r *FilamentRepository) Create(ctx context.Context, spool *models.FilamentSpool) (*models.FilamentSpool, error) {
|
||||
var id int
|
||||
err := r.pool.QueryRow(ctx, `
|
||||
INSERT INTO filament_spools (
|
||||
name, material_base_id, material_finish_id, material_modifier_id,
|
||||
color_hex, brand, diameter_mm, initial_grams, remaining_grams,
|
||||
spool_weight_grams, cost_usd, low_stock_threshold_grams,
|
||||
notes, barcode
|
||||
) VALUES ($1,$2,$3,$4,$5,$6,$7,$8,$9,$10,$11,$12,$13,$14)
|
||||
RETURNING id
|
||||
`,
|
||||
spool.Name, spool.MaterialBaseID, spool.MaterialFinishID, spool.MaterialModifierID,
|
||||
spool.ColorHex, spool.Brand, spool.DiameterMM, spool.InitialGrams, spool.RemainingGrams,
|
||||
spool.SpoolWeightGrams, spool.CostUSD, spool.LowStockThresholdGrams,
|
||||
spool.Notes, spool.Barcode,
|
||||
).Scan(&id)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return r.GetByID(ctx, id)
|
||||
}
|
||||
|
||||
// Update applies partial updates to an existing filament spool.
// Only the columns present in the updates map are changed; updated_at is
// always refreshed. Returns (nil, nil) when the row is missing or already
// soft-deleted.
//
// SECURITY NOTE(review): map keys are interpolated directly into the SQL as
// column names. Callers MUST supply only a whitelisted set of column names —
// never user-controlled strings — or this becomes a SQL injection vector.
func (r *FilamentRepository) Update(ctx context.Context, id int, updates map[string]interface{}) (*models.FilamentSpool, error) {
	if len(updates) == 0 {
		// Nothing to change — just return the current row.
		return r.GetByID(ctx, id)
	}

	setClauses := []string{"updated_at = NOW()"}
	args := []interface{}{}
	argIdx := 1

	// Map iteration order is random, but each clause binds its own
	// placeholder, so ordering does not affect the result.
	for col, val := range updates {
		setClauses = append(setClauses, fmt.Sprintf("%s = $%d", col, argIdx))
		args = append(args, val)
		argIdx++
	}

	// The row ID binds last, after all SET values.
	args = append(args, id)
	query := fmt.Sprintf("UPDATE filament_spools SET %s WHERE id = $%d AND deleted_at IS NULL",
		strings.Join(setClauses, ", "), argIdx)

	result, err := r.pool.Exec(ctx, query, args...)
	if err != nil {
		return nil, err
	}
	if result.RowsAffected() == 0 {
		return nil, nil // not found or deleted
	}

	// Re-read so the caller gets the post-update row with JOINed data.
	return r.GetByID(ctx, id)
}
|
||||
|
||||
// SoftDelete marks a filament spool as deleted by setting deleted_at = NOW().
|
||||
// Returns true if a row was affected.
|
||||
func (r *FilamentRepository) SoftDelete(ctx context.Context, id int) (bool, error) {
|
||||
result, err := r.pool.Exec(ctx, `
|
||||
UPDATE filament_spools
|
||||
SET deleted_at = NOW(), updated_at = NOW()
|
||||
WHERE id = $1 AND deleted_at IS NULL
|
||||
`, id)
|
||||
if err != nil {
|
||||
return false, err
|
||||
}
|
||||
return result.RowsAffected() > 0, nil
|
||||
}
|
||||
54
backend/internal/repositories/material_repository.go
Normal file
54
backend/internal/repositories/material_repository.go
Normal file
@@ -0,0 +1,54 @@
|
||||
// Package repositories provides data access logic backed by PostgreSQL via pgxpool.
|
||||
package repositories
|
||||
|
||||
import (
|
||||
"context"
|
||||
|
||||
"github.com/CubeCraft-Creations/Extrudex/backend/internal/models"
|
||||
"github.com/jackc/pgx/v5/pgxpool"
|
||||
)
|
||||
|
||||
// MaterialRepository handles database queries for material lookup tables.
|
||||
type MaterialRepository struct {
|
||||
pool *pgxpool.Pool
|
||||
}
|
||||
|
||||
// NewMaterialRepository creates a MaterialRepository backed by the given pool.
|
||||
func NewMaterialRepository(pool *pgxpool.Pool) *MaterialRepository {
|
||||
return &MaterialRepository{pool: pool}
|
||||
}
|
||||
|
||||
// GetAll returns all material bases ordered by name.
|
||||
func (r *MaterialRepository) GetAll(ctx context.Context) ([]models.MaterialBase, error) {
|
||||
rows, err := r.pool.Query(ctx, `
|
||||
SELECT id, name, density_g_cm3, extrusion_temp_min, extrusion_temp_max,
|
||||
bed_temp_min, bed_temp_max, created_at, updated_at
|
||||
FROM material_bases
|
||||
ORDER BY name
|
||||
`)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer rows.Close()
|
||||
|
||||
var materials []models.MaterialBase
|
||||
for rows.Next() {
|
||||
var m models.MaterialBase
|
||||
if err := rows.Scan(
|
||||
&m.ID, &m.Name, &m.DensityGCm3,
|
||||
&m.ExtrusionTempMin, &m.ExtrusionTempMax,
|
||||
&m.BedTempMin, &m.BedTempMax,
|
||||
&m.CreatedAt, &m.UpdatedAt,
|
||||
); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
materials = append(materials, m)
|
||||
}
|
||||
if err := rows.Err(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if materials == nil {
|
||||
materials = []models.MaterialBase{}
|
||||
}
|
||||
return materials, nil
|
||||
}
|
||||
157
backend/internal/repositories/print_job_repository.go
Normal file
157
backend/internal/repositories/print_job_repository.go
Normal file
@@ -0,0 +1,157 @@
|
||||
package repositories
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"strings"
|
||||
|
||||
"github.com/CubeCraft-Creations/Extrudex/backend/internal/models"
|
||||
"github.com/jackc/pgx/v5/pgxpool"
|
||||
)
|
||||
|
||||
// PrintJobRepository handles database queries for print_jobs.
|
||||
type PrintJobRepository struct {
|
||||
pool *pgxpool.Pool
|
||||
}
|
||||
|
||||
// NewPrintJobRepository creates a PrintJobRepository backed by the given pool.
|
||||
func NewPrintJobRepository(pool *pgxpool.Pool) *PrintJobRepository {
|
||||
return &PrintJobRepository{pool: pool}
|
||||
}
|
||||
|
||||
// PrintJobFilter holds query parameters for listing print jobs.
|
||||
type PrintJobFilter struct {
|
||||
Status string // filter by job_status name (case-insensitive)
|
||||
PrinterID *int // filter by printer_id
|
||||
Limit int
|
||||
Offset int
|
||||
}
|
||||
|
||||
// scanPrintJobWithJoins scans a print_job row with JOINed tables.
|
||||
func (r *PrintJobRepository) scanPrintJobWithJoins(row interface{ Scan(...interface{}) error }) (models.PrintJob, error) {
|
||||
var pj models.PrintJob
|
||||
var js models.JobStatus
|
||||
|
||||
err := row.Scan(
|
||||
&pj.ID, &pj.PrinterID, &pj.FilamentSpoolID,
|
||||
&pj.JobName, &pj.FileName,
|
||||
&pj.JobStatusID,
|
||||
&pj.StartedAt, &pj.CompletedAt,
|
||||
&pj.DurationSeconds, &pj.EstimatedDurationSeconds,
|
||||
&pj.TotalMMExtruded, &pj.TotalGramsUsed, &pj.TotalCostUSD,
|
||||
&pj.Notes,
|
||||
&pj.DeletedAt, &pj.CreatedAt, &pj.UpdatedAt,
|
||||
&js.ID, &js.Name,
|
||||
&js.CreatedAt, &js.UpdatedAt,
|
||||
)
|
||||
if err != nil {
|
||||
return pj, err
|
||||
}
|
||||
|
||||
pj.JobStatus = &js
|
||||
return pj, nil
|
||||
}
|
||||
|
||||
// GetAll returns print jobs matching the given filters, with pagination.
|
||||
func (r *PrintJobRepository) GetAll(ctx context.Context, filter PrintJobFilter) ([]models.PrintJob, int, error) {
|
||||
conditions := []string{"pj.deleted_at IS NULL"}
|
||||
args := []interface{}{}
|
||||
argIdx := 1
|
||||
|
||||
if filter.Status != "" {
|
||||
conditions = append(conditions, fmt.Sprintf("LOWER(js.name) = LOWER($%d)", argIdx))
|
||||
args = append(args, filter.Status)
|
||||
argIdx++
|
||||
}
|
||||
if filter.PrinterID != nil {
|
||||
conditions = append(conditions, fmt.Sprintf("pj.printer_id = $%d", argIdx))
|
||||
args = append(args, *filter.PrinterID)
|
||||
argIdx++
|
||||
}
|
||||
|
||||
whereClause := ""
|
||||
if len(conditions) > 0 {
|
||||
whereClause = "WHERE " + strings.Join(conditions, " AND ")
|
||||
}
|
||||
|
||||
// Count.
|
||||
var total int
|
||||
countQuery := `SELECT COUNT(*)
|
||||
FROM print_jobs pj
|
||||
LEFT JOIN job_statuses js ON pj.job_status_id = js.id
|
||||
` + " " + whereClause
|
||||
if err := r.pool.QueryRow(ctx, countQuery, args...).Scan(&total); err != nil {
|
||||
return nil, 0, err
|
||||
}
|
||||
|
||||
// Query with pagination.
|
||||
dataQuery := `SELECT
|
||||
pj.id, pj.printer_id, pj.filament_spool_id,
|
||||
pj.job_name, pj.file_name,
|
||||
pj.job_status_id,
|
||||
pj.started_at, pj.completed_at,
|
||||
pj.duration_seconds, pj.estimated_duration_seconds,
|
||||
pj.total_mm_extruded, pj.total_grams_used, pj.total_cost_usd,
|
||||
pj.notes,
|
||||
pj.deleted_at, pj.created_at, pj.updated_at,
|
||||
js.id, js.name,
|
||||
js.created_at, js.updated_at
|
||||
FROM print_jobs pj
|
||||
LEFT JOIN job_statuses js ON pj.job_status_id = js.id
|
||||
` + whereClause +
|
||||
" ORDER BY pj.created_at DESC" +
|
||||
fmt.Sprintf(" LIMIT $%d OFFSET $%d", argIdx, argIdx+1)
|
||||
|
||||
dataArgs := make([]interface{}, len(args))
|
||||
copy(dataArgs, args)
|
||||
dataArgs = append(dataArgs, filter.Limit, filter.Offset)
|
||||
|
||||
rows, err := r.pool.Query(ctx, dataQuery, dataArgs...)
|
||||
if err != nil {
|
||||
return nil, 0, err
|
||||
}
|
||||
defer rows.Close()
|
||||
|
||||
var jobs []models.PrintJob
|
||||
for rows.Next() {
|
||||
pj, err := r.scanPrintJobWithJoins(rows)
|
||||
if err != nil {
|
||||
return nil, 0, err
|
||||
}
|
||||
jobs = append(jobs, pj)
|
||||
}
|
||||
if err := rows.Err(); err != nil {
|
||||
return nil, 0, err
|
||||
}
|
||||
if jobs == nil {
|
||||
jobs = []models.PrintJob{}
|
||||
}
|
||||
|
||||
return jobs, total, nil
|
||||
}
|
||||
|
||||
// GetByID returns a single print job by ID with JOINed job_status.
// NOTE(review): a missing or soft-deleted row does not return (nil, nil) —
// the driver's no-rows error from Scan is propagated; callers must treat
// that error as "not found".
func (r *PrintJobRepository) GetByID(ctx context.Context, id int) (*models.PrintJob, error) {
	// Column order must match scanPrintJobWithJoins' scan-target order.
	row := r.pool.QueryRow(ctx, `
		SELECT
		pj.id, pj.printer_id, pj.filament_spool_id,
		pj.job_name, pj.file_name,
		pj.job_status_id,
		pj.started_at, pj.completed_at,
		pj.duration_seconds, pj.estimated_duration_seconds,
		pj.total_mm_extruded, pj.total_grams_used, pj.total_cost_usd,
		pj.notes,
		pj.deleted_at, pj.created_at, pj.updated_at,
		js.id, js.name,
		js.created_at, js.updated_at
		FROM print_jobs pj
		LEFT JOIN job_statuses js ON pj.job_status_id = js.id
		WHERE pj.id = $1 AND pj.deleted_at IS NULL
	`, id)

	pj, err := r.scanPrintJobWithJoins(row)
	if err != nil {
		return nil, err
	}
	return &pj, nil
}
|
||||
78
backend/internal/repositories/printer_repository.go
Normal file
78
backend/internal/repositories/printer_repository.go
Normal file
@@ -0,0 +1,78 @@
|
||||
package repositories
|
||||
|
||||
import (
|
||||
"context"
|
||||
|
||||
"github.com/CubeCraft-Creations/Extrudex/backend/internal/models"
|
||||
"github.com/jackc/pgx/v5/pgxpool"
|
||||
)
|
||||
|
||||
// PrinterRepository handles database queries for printers.
|
||||
type PrinterRepository struct {
|
||||
pool *pgxpool.Pool
|
||||
}
|
||||
|
||||
// NewPrinterRepository creates a PrinterRepository backed by the given pool.
|
||||
func NewPrinterRepository(pool *pgxpool.Pool) *PrinterRepository {
|
||||
return &PrinterRepository{pool: pool}
|
||||
}
|
||||
|
||||
// scanPrinterWithType scans a printer row with JOINed printer_type.
|
||||
func (r *PrinterRepository) scanPrinterWithType(row interface{ Scan(...interface{}) error }) (models.Printer, error) {
|
||||
var p models.Printer
|
||||
var pt models.PrinterType
|
||||
|
||||
err := row.Scan(
|
||||
&p.ID, &p.Name, &p.PrinterTypeID,
|
||||
&p.Manufacturer, &p.Model,
|
||||
&p.MoonrakerURL, &p.MoonrakerAPIKey,
|
||||
&p.MQTTBrokerHost, &p.MQTTTopicPrefix,
|
||||
&p.MQTTTLSEnabled, &p.IsActive,
|
||||
&p.CreatedAt, &p.UpdatedAt,
|
||||
&pt.ID, &pt.Name,
|
||||
&pt.CreatedAt, &pt.UpdatedAt,
|
||||
)
|
||||
if err != nil {
|
||||
return p, err
|
||||
}
|
||||
|
||||
p.PrinterType = &pt
|
||||
return p, nil
|
||||
}
|
||||
|
||||
// GetAll returns all printers joined with their printer_type, ordered by name.
|
||||
func (r *PrinterRepository) GetAll(ctx context.Context) ([]models.Printer, error) {
|
||||
rows, err := r.pool.Query(ctx, `
|
||||
SELECT p.id, p.name, p.printer_type_id,
|
||||
p.manufacturer, p.model,
|
||||
p.moonraker_url, p.moonraker_api_key,
|
||||
p.mqtt_broker_host, p.mqtt_topic_prefix,
|
||||
p.mqtt_tls_enabled, p.is_active,
|
||||
p.created_at, p.updated_at,
|
||||
pt.id, pt.name,
|
||||
pt.created_at, pt.updated_at
|
||||
FROM printers p
|
||||
JOIN printer_types pt ON p.printer_type_id = pt.id
|
||||
ORDER BY p.name
|
||||
`)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer rows.Close()
|
||||
|
||||
var printers []models.Printer
|
||||
for rows.Next() {
|
||||
p, err := r.scanPrinterWithType(rows)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
printers = append(printers, p)
|
||||
}
|
||||
if err := rows.Err(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if printers == nil {
|
||||
printers = []models.Printer{}
|
||||
}
|
||||
return printers, nil
|
||||
}
|
||||
96
backend/internal/repositories/usage_log_repository.go
Normal file
96
backend/internal/repositories/usage_log_repository.go
Normal file
@@ -0,0 +1,96 @@
|
||||
package repositories
|
||||
|
||||
import (
	"context"
	"fmt"
	"strings"

	"github.com/CubeCraft-Creations/Extrudex/backend/internal/models"
	"github.com/jackc/pgx/v5/pgxpool"
)
|
||||
|
||||
// UsageLogRepository handles database queries for usage_logs.
type UsageLogRepository struct {
	pool *pgxpool.Pool // shared pgx connection pool
}

// NewUsageLogRepository creates a UsageLogRepository backed by the given pool.
func NewUsageLogRepository(pool *pgxpool.Pool) *UsageLogRepository {
	return &UsageLogRepository{pool: pool}
}

// UsageLogFilter holds query parameters for listing usage logs.
// Nil pointer fields mean "do not filter on this column".
type UsageLogFilter struct {
	SpoolID *int // filter by filament_spool_id
	JobID   *int // filter by print_job_id
	Limit   int  // page size (bound as SQL LIMIT)
	Offset  int  // page start (bound as SQL OFFSET)
}
|
||||
|
||||
// GetAll returns usage logs matching the given filters, with pagination.
|
||||
func (r *UsageLogRepository) GetAll(ctx context.Context, filter UsageLogFilter) ([]models.UsageLog, int, error) {
|
||||
conditions := []string{"1=1"}
|
||||
args := []interface{}{}
|
||||
argIdx := 1
|
||||
|
||||
if filter.SpoolID != nil {
|
||||
conditions = append(conditions, fmt.Sprintf("ul.filament_spool_id = $%d", argIdx))
|
||||
args = append(args, *filter.SpoolID)
|
||||
argIdx++
|
||||
}
|
||||
if filter.JobID != nil {
|
||||
conditions = append(conditions, fmt.Sprintf("ul.print_job_id = $%d", argIdx))
|
||||
args = append(args, *filter.JobID)
|
||||
argIdx++
|
||||
}
|
||||
|
||||
whereClause := "WHERE " + fmt.Sprintf("%s", conditions[0])
|
||||
for _, c := range conditions[1:] {
|
||||
whereClause += " AND " + c
|
||||
}
|
||||
|
||||
// Count.
|
||||
var total int
|
||||
countQuery := "SELECT COUNT(*) FROM usage_logs ul " + whereClause
|
||||
if err := r.pool.QueryRow(ctx, countQuery, args...).Scan(&total); err != nil {
|
||||
return nil, 0, err
|
||||
}
|
||||
|
||||
// Query with pagination.
|
||||
dataQuery := `SELECT id, print_job_id, filament_spool_id, mm_extruded,
|
||||
grams_used, cost_usd, logged_at, created_at
|
||||
FROM usage_logs ul
|
||||
` + whereClause +
|
||||
" ORDER BY ul.logged_at DESC" +
|
||||
fmt.Sprintf(" LIMIT $%d OFFSET $%d", argIdx, argIdx+1)
|
||||
|
||||
dataArgs := make([]interface{}, len(args))
|
||||
copy(dataArgs, args)
|
||||
dataArgs = append(dataArgs, filter.Limit, filter.Offset)
|
||||
|
||||
rows, err := r.pool.Query(ctx, dataQuery, dataArgs...)
|
||||
if err != nil {
|
||||
return nil, 0, err
|
||||
}
|
||||
defer rows.Close()
|
||||
|
||||
var logs []models.UsageLog
|
||||
for rows.Next() {
|
||||
var l models.UsageLog
|
||||
if err := rows.Scan(
|
||||
&l.ID, &l.PrintJobID, &l.FilamentSpoolID,
|
||||
&l.MMExtruded, &l.GramsUsed, &l.CostUSD,
|
||||
&l.LoggedAt, &l.CreatedAt,
|
||||
); err != nil {
|
||||
return nil, 0, err
|
||||
}
|
||||
logs = append(logs, l)
|
||||
}
|
||||
if err := rows.Err(); err != nil {
|
||||
return nil, 0, err
|
||||
}
|
||||
if logs == nil {
|
||||
logs = []models.UsageLog{}
|
||||
}
|
||||
|
||||
return logs, total, nil
|
||||
}
|
||||
90
backend/internal/router/router.go
Normal file
90
backend/internal/router/router.go
Normal file
@@ -0,0 +1,90 @@
|
||||
package router
|
||||
|
||||
import (
|
||||
"net/http"
|
||||
"time"
|
||||
|
||||
"github.com/CubeCraft-Creations/Extrudex/backend/internal/config"
|
||||
"github.com/CubeCraft-Creations/Extrudex/backend/internal/handlers"
|
||||
"github.com/CubeCraft-Creations/Extrudex/backend/internal/repositories"
|
||||
"github.com/CubeCraft-Creations/Extrudex/backend/internal/services"
|
||||
"github.com/CubeCraft-Creations/Extrudex/backend/internal/sse"
|
||||
"github.com/go-chi/chi/v5"
|
||||
"github.com/go-chi/chi/v5/middleware"
|
||||
"github.com/jackc/pgx/v5/pgxpool"
|
||||
)
|
||||
|
||||
// New creates and configures a Chi router with all middleware and handlers mounted.
// Wiring order matters: global middleware is registered first, then timeouts
// are applied per-route so long-lived connections are not killed globally.
func New(cfg *config.Config, dbPool *pgxpool.Pool, sseBC *sse.Broadcaster) chi.Router {
	r := chi.NewRouter()

	// Middleware
	r.Use(middleware.RequestID)
	r.Use(middleware.RealIP)
	r.Use(middleware.Logger)
	r.Use(middleware.Recoverer)
	// Timeout middleware is applied per-route below to exclude SSE

	// CORS — a single fixed origin taken from config.
	// NOTE(review): no "Vary: Origin" header is set; confirm whether responses
	// are ever cached by intermediaries before relying on a fixed origin.
	r.Use(func(next http.Handler) http.Handler {
		return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
			w.Header().Set("Access-Control-Allow-Origin", cfg.CorsOrigin)
			w.Header().Set("Access-Control-Allow-Methods", "GET, POST, PUT, DELETE, OPTIONS")
			w.Header().Set("Access-Control-Allow-Headers", "Content-Type, Authorization")
			if r.Method == http.MethodOptions {
				// Preflight requests are answered here and never reach handlers.
				w.WriteHeader(http.StatusNoContent)
				return
			}
			next.ServeHTTP(w, r)
		})
	})

	// Health check (with its own, shorter 30s timeout)
	healthHandler := handlers.NewHealthHandler(dbPool)
	r.With(middleware.Timeout(30 * time.Second)).Get("/health", healthHandler.ServeHTTP)

	// ── Repositories ──────────────────────────────────────────────────────
	materialRepo := repositories.NewMaterialRepository(dbPool)
	filamentRepo := repositories.NewFilamentRepository(dbPool)
	printerRepo := repositories.NewPrinterRepository(dbPool)
	printJobRepo := repositories.NewPrintJobRepository(dbPool)
	usageLogRepo := repositories.NewUsageLogRepository(dbPool)

	// ── Services ──────────────────────────────────────────────────────────
	filamentService := services.NewFilamentService(filamentRepo)
	printerService := services.NewPrinterService(printerRepo)
	printJobService := services.NewPrintJobService(printJobRepo)

	// ── Handlers ──────────────────────────────────────────────────────────
	// Material and usage-log handlers take repositories directly (no service layer).
	materialHandler := handlers.NewMaterialHandler(materialRepo)
	filamentHandler := handlers.NewFilamentHandler(filamentService)
	printerHandler := handlers.NewPrinterHandler(printerService)
	printJobHandler := handlers.NewPrintJobHandler(printJobService)
	usageLogHandler := handlers.NewUsageLogHandler(usageLogRepo)

	// ── API Routes (with timeout) ─────────────────────────────────────────
	r.Route("/api", func(r chi.Router) {
		r.Use(middleware.Timeout(60 * time.Second))
		r.Get("/materials", materialHandler.List)

		r.Route("/filaments", func(r chi.Router) {
			r.Get("/", filamentHandler.List)
			r.Post("/", filamentHandler.Create)
			r.Route("/{id}", func(r chi.Router) {
				r.Get("/", filamentHandler.Get)
				r.Put("/", filamentHandler.Update)
				r.Delete("/", filamentHandler.Delete)
			})
		})

		r.Get("/printers", printerHandler.List)
		r.Get("/print-jobs", printJobHandler.List)
		r.Get("/usage-logs", usageLogHandler.List)

		// SSE Events stream
		// NOTE(review): /api/events is registered INSIDE this route group, so it
		// inherits the 60-second Timeout middleware — contradicting the comment
		// above that says timeouts exclude SSE. Long-lived SSE connections will
		// be cut after 60s; confirm and move the route outside if unintended.
		sseHandler := sse.NewHandler(sseBC)
		r.Get("/events", sseHandler.ServeHTTP)
	})

	return r
}
|
||||
82
backend/internal/services/services.go
Normal file
82
backend/internal/services/services.go
Normal file
@@ -0,0 +1,82 @@
|
||||
// Package services contains business logic and application services.
|
||||
package services
|
||||
|
||||
import (
|
||||
"context"
|
||||
|
||||
"github.com/CubeCraft-Creations/Extrudex/backend/internal/models"
|
||||
"github.com/CubeCraft-Creations/Extrudex/backend/internal/repositories"
|
||||
)
|
||||
|
||||
// FilamentService wraps FilamentRepository with business logic and validation.
type FilamentService struct {
	repo *repositories.FilamentRepository // underlying data-access layer
}

// NewFilamentService creates a FilamentService backed by the given repository.
func NewFilamentService(repo *repositories.FilamentRepository) *FilamentService {
	return &FilamentService{repo: repo}
}

// List returns paginated filament spools filtered by the given criteria.
func (s *FilamentService) List(ctx context.Context, filter repositories.FilamentFilter) ([]models.FilamentSpool, int, error) {
	return s.repo.GetAll(ctx, filter)
}

// GetByID returns a single filament spool by ID.
func (s *FilamentService) GetByID(ctx context.Context, id int) (*models.FilamentSpool, error) {
	return s.repo.GetByID(ctx, id)
}

// Create validates and creates a new filament spool.
// Validation failures are returned before any database work happens.
func (s *FilamentService) Create(ctx context.Context, spool *models.FilamentSpool) (*models.FilamentSpool, error) {
	if err := validateFilamentSpool(spool); err != nil {
		return nil, err
	}
	return s.repo.Create(ctx, spool)
}

// Update applies partial updates to a filament spool.
// NOTE(review): the previous doc claimed updates happen "after validation",
// but no validation is performed here (unlike Create). Callers are presumably
// expected to run ValidateUpdateFilamentRequest first — confirm at the
// handler layer.
func (s *FilamentService) Update(ctx context.Context, id int, updates map[string]interface{}) (*models.FilamentSpool, error) {
	return s.repo.Update(ctx, id, updates)
}

// SoftDelete marks a filament spool as deleted.
func (s *FilamentService) SoftDelete(ctx context.Context, id int) (bool, error) {
	return s.repo.SoftDelete(ctx, id)
}
|
||||
|
||||
// PrinterService wraps PrinterRepository.
|
||||
type PrinterService struct {
|
||||
repo *repositories.PrinterRepository
|
||||
}
|
||||
|
||||
// NewPrinterService creates a PrinterService backed by the given repository.
|
||||
func NewPrinterService(repo *repositories.PrinterRepository) *PrinterService {
|
||||
return &PrinterService{repo: repo}
|
||||
}
|
||||
|
||||
// List returns all printers.
|
||||
func (s *PrinterService) List(ctx context.Context) ([]models.Printer, error) {
|
||||
return s.repo.GetAll(ctx)
|
||||
}
|
||||
|
||||
// PrintJobService wraps PrintJobRepository.
|
||||
type PrintJobService struct {
|
||||
repo *repositories.PrintJobRepository
|
||||
}
|
||||
|
||||
// NewPrintJobService creates a PrintJobService backed by the given repository.
|
||||
func NewPrintJobService(repo *repositories.PrintJobRepository) *PrintJobService {
|
||||
return &PrintJobService{repo: repo}
|
||||
}
|
||||
|
||||
// List returns paginated print jobs filtered by the given criteria.
|
||||
func (s *PrintJobService) List(ctx context.Context, filter repositories.PrintJobFilter) ([]models.PrintJob, int, error) {
|
||||
return s.repo.GetAll(ctx, filter)
|
||||
}
|
||||
|
||||
// GetByID returns a single print job by ID.
|
||||
func (s *PrintJobService) GetByID(ctx context.Context, id int) (*models.PrintJob, error) {
|
||||
return s.repo.GetByID(ctx, id)
|
||||
}
|
||||
74
backend/internal/services/validation.go
Normal file
74
backend/internal/services/validation.go
Normal file
@@ -0,0 +1,74 @@
|
||||
package services
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"fmt"
|
||||
"regexp"
|
||||
|
||||
"github.com/CubeCraft-Creations/Extrudex/backend/internal/dtos"
|
||||
"github.com/CubeCraft-Creations/Extrudex/backend/internal/models"
|
||||
)
|
||||
|
||||
// colorHexPattern validates hex color strings like #FF0000 or #ff0000.
|
||||
var colorHexPattern = regexp.MustCompile(`^#[0-9A-Fa-f]{6}$`)
|
||||
|
||||
// validateFilamentSpool performs validation on a FilamentSpool entity.
|
||||
// Returns a descriptive error on failure.
|
||||
func validateFilamentSpool(s *models.FilamentSpool) error {
|
||||
if s.Name == "" {
|
||||
return errors.New("name is required")
|
||||
}
|
||||
if s.MaterialBaseID <= 0 {
|
||||
return errors.New("material_base_id is required")
|
||||
}
|
||||
if s.MaterialFinishID <= 0 {
|
||||
return errors.New("material_finish_id is required")
|
||||
}
|
||||
if !colorHexPattern.MatchString(s.ColorHex) {
|
||||
return fmt.Errorf("color_hex must be a valid hex color (e.g., #FF0000)")
|
||||
}
|
||||
if s.InitialGrams <= 0 {
|
||||
return errors.New("initial_grams must be greater than 0")
|
||||
}
|
||||
if s.RemainingGrams < 0 {
|
||||
return errors.New("remaining_grams must be >= 0")
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// ValidateCreateFilamentRequest validates a creation DTO.
|
||||
func ValidateCreateFilamentRequest(req dtos.CreateFilamentRequest) error {
|
||||
if req.Name == "" {
|
||||
return errors.New("name is required")
|
||||
}
|
||||
if req.MaterialBaseID <= 0 {
|
||||
return errors.New("material_base_id is required")
|
||||
}
|
||||
if req.MaterialFinishID <= 0 {
|
||||
return errors.New("material_finish_id is required")
|
||||
}
|
||||
if !colorHexPattern.MatchString(req.ColorHex) {
|
||||
return fmt.Errorf("color_hex must be a valid hex color (e.g., #FF0000)")
|
||||
}
|
||||
if req.InitialGrams <= 0 {
|
||||
return errors.New("initial_grams must be greater than 0")
|
||||
}
|
||||
if req.RemainingGrams < 0 {
|
||||
return errors.New("remaining_grams must be >= 0")
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// ValidateUpdateFilamentRequest validates partial update fields.
|
||||
func ValidateUpdateFilamentRequest(req dtos.UpdateFilamentRequest) error {
|
||||
if req.ColorHex != nil && !colorHexPattern.MatchString(*req.ColorHex) {
|
||||
return fmt.Errorf("color_hex must be a valid hex color (e.g., #FF0000)")
|
||||
}
|
||||
if req.InitialGrams != nil && *req.InitialGrams <= 0 {
|
||||
return errors.New("initial_grams must be greater than 0")
|
||||
}
|
||||
if req.RemainingGrams != nil && *req.RemainingGrams < 0 {
|
||||
return errors.New("remaining_grams must be >= 0")
|
||||
}
|
||||
return nil
|
||||
}
|
||||
133
backend/internal/sse/broadcaster.go
Normal file
133
backend/internal/sse/broadcaster.go
Normal file
@@ -0,0 +1,133 @@
|
||||
package sse
|
||||
|
||||
import (
|
||||
"log/slog"
|
||||
"sync"
|
||||
)
|
||||
|
||||
// client represents a single SSE subscriber — identified by its send channel.
type client struct {
	ch chan string // receives pre-formatted SSE frames
}

// Broadcaster receives Events on its input channel and fans them out to every
// connected client. Subscribe adds a new client; Unsubscribe removes one.
// Start must be called before the broadcaster accepts events.
// The clients map is touched only by the loop goroutine; external callers
// interact exclusively through channels, so no mutex is needed.
type Broadcaster struct {
	input       chan Event               // events awaiting fan-out
	subscribe   chan client              // new subscriber registrations
	unsubscribe chan client              // subscriber removals
	clients     map[chan string]struct{} // live subscribers (loop goroutine only)
	done        chan struct{}            // closed by Stop to signal shutdown
	once        sync.Once                // makes Stop idempotent
}
|
||||
|
||||
// NewBroadcaster creates a Broadcaster. bufSize controls the buffer depth for
|
||||
// the input channel as well as for each per-client outbound channel.
|
||||
func NewBroadcaster(bufSize int) *Broadcaster {
|
||||
if bufSize <= 0 {
|
||||
bufSize = 64
|
||||
}
|
||||
return &Broadcaster{
|
||||
input: make(chan Event, bufSize),
|
||||
subscribe: make(chan client),
|
||||
unsubscribe: make(chan client),
|
||||
clients: make(map[chan string]struct{}),
|
||||
done: make(chan struct{}),
|
||||
}
|
||||
}
|
||||
|
||||
// Publish pushes an event into the broadcaster. Safe for concurrent use.
// Blocks while the input buffer is full; once the broadcaster has been
// stopped (done closed) the event is silently dropped instead.
func (b *Broadcaster) Publish(ev Event) {
	select {
	case b.input <- ev:
	case <-b.done:
		// Silently drop during shutdown.
	}
}
|
||||
|
||||
// Start launches the broadcaster's fan-out loop in a goroutine.
|
||||
// It must be called before Publish is used.
|
||||
func (b *Broadcaster) Start() {
|
||||
go b.loop()
|
||||
}
|
||||
|
||||
// Stop terminates the fan-out loop and closes all client channels.
|
||||
// It is safe to call multiple times.
|
||||
func (b *Broadcaster) Stop() {
|
||||
b.once.Do(func() {
|
||||
close(b.done)
|
||||
})
|
||||
}
|
||||
|
||||
// Subscribe returns a new client channel that receives SSE-formatted strings.
|
||||
func (b *Broadcaster) Subscribe() chan string {
|
||||
c := client{ch: make(chan string, 64)}
|
||||
select {
|
||||
case b.subscribe <- c:
|
||||
case <-b.done:
|
||||
// Broadcaster already stopped — return a closed chan so the handler
|
||||
// can bail out quickly.
|
||||
ch := make(chan string)
|
||||
close(ch)
|
||||
return ch
|
||||
}
|
||||
return c.ch
|
||||
}
|
||||
|
||||
// Unsubscribe removes a client channel and closes it.
|
||||
func (b *Broadcaster) Unsubscribe(ch chan string) {
|
||||
c := client{ch: ch}
|
||||
select {
|
||||
case b.unsubscribe <- c:
|
||||
case <-b.done:
|
||||
// Already shutting down — channels will be cleaned up by Stop.
|
||||
}
|
||||
}
|
||||
|
||||
// loop is the core fan-out goroutine.
|
||||
func (b *Broadcaster) loop() {
|
||||
for {
|
||||
select {
|
||||
case ev := <-b.input:
|
||||
sse := ev.toSSE()
|
||||
for ch := range b.clients {
|
||||
// Non-blocking send — slow clients are dropped.
|
||||
select {
|
||||
case ch <- sse:
|
||||
default:
|
||||
slog.Warn("sse broadcaster: dropping event for slow client", "type", ev.Type)
|
||||
}
|
||||
}
|
||||
|
||||
case c := <-b.subscribe:
|
||||
b.clients[c.ch] = struct{}{}
|
||||
slog.Debug("sse broadcaster: client connected", "total_clients", len(b.clients))
|
||||
|
||||
case c := <-b.unsubscribe:
|
||||
if _, ok := b.clients[c.ch]; ok {
|
||||
delete(b.clients, c.ch)
|
||||
close(c.ch)
|
||||
slog.Debug("sse broadcaster: client disconnected", "total_clients", len(b.clients))
|
||||
}
|
||||
|
||||
case <-b.done:
|
||||
// Drain remaining events in input before shutting down.
|
||||
for ev := range b.input {
|
||||
sse := ev.toSSE()
|
||||
for ch := range b.clients {
|
||||
select {
|
||||
case ch <- sse:
|
||||
default:
|
||||
}
|
||||
}
|
||||
}
|
||||
// Close all remaining client channels.
|
||||
for ch := range b.clients {
|
||||
close(ch)
|
||||
}
|
||||
b.clients = nil
|
||||
return
|
||||
}
|
||||
}
|
||||
}
|
||||
92
backend/internal/sse/events.go
Normal file
92
backend/internal/sse/events.go
Normal file
@@ -0,0 +1,92 @@
|
||||
// Package sse provides Server-Sent Events infrastructure for real-time updates.
|
||||
// Includes event types, a central broadcaster, and an HTTP handler.
|
||||
package sse
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"time"
|
||||
)
|
||||
|
||||
// EventType identifies the category of an SSE event; its value is also used
// verbatim as the SSE "event:" field (see toSSE).
type EventType string

const (
	EventPrinterStatus EventType = "printer.status"
	EventJobStarted    EventType = "job.started"
	EventJobCompleted  EventType = "job.completed"
	EventFilamentLow   EventType = "filament.low"
)

// Event is a JSON-serializable SSE event pushed through the broadcaster.
type Event struct {
	Type      EventType       `json:"type"`
	Payload   json.RawMessage `json:"payload"` // pre-marshaled typed payload
	Timestamp time.Time       `json:"timestamp"`
}

// PrinterStatusPayload carries printer online/offline/printing state.
type PrinterStatusPayload struct {
	PrinterID   int    `json:"printer_id"`
	PrinterName string `json:"printer_name"`
	Status      string `json:"status"` // "online", "offline", "printing"
}

// JobStartedPayload carries initial print job info.
type JobStartedPayload struct {
	JobID     int    `json:"job_id"`
	JobName   string `json:"job_name"`
	PrinterID int    `json:"printer_id"`
	SpoolID   *int   `json:"spool_id,omitempty"` // nil when no spool is known
}

// JobCompletedPayload carries final print job data including usage.
// Pointer fields are omitted from the JSON when unknown.
type JobCompletedPayload struct {
	JobID           int      `json:"job_id"`
	JobName         string   `json:"job_name"`
	PrinterID       int      `json:"printer_id"`
	DurationSeconds *int     `json:"duration_seconds,omitempty"`
	TotalGramsUsed  *float64 `json:"total_grams_used,omitempty"`
	TotalCostUSD    *float64 `json:"total_cost_usd,omitempty"`
}

// FilamentLowPayload alerts that a spool is below its threshold.
type FilamentLowPayload struct {
	SpoolID        int    `json:"spool_id"`
	SpoolName      string `json:"spool_name"`
	RemainingGrams int    `json:"remaining_grams"`
	ThresholdGrams int    `json:"threshold_grams"`
}
|
||||
|
||||
// NewEvent creates an Event with the current timestamp from a typed payload.
|
||||
func NewEvent(eventType EventType, payload any) (Event, error) {
|
||||
raw, err := json.Marshal(payload)
|
||||
if err != nil {
|
||||
return Event{}, err
|
||||
}
|
||||
return Event{
|
||||
Type: eventType,
|
||||
Payload: raw,
|
||||
Timestamp: time.Now().UTC(),
|
||||
}, nil
|
||||
}
|
||||
|
||||
// MustEvent creates an Event and panics on marshal failure (for use with
|
||||
// known-good payloads in tests and internal wiring).
|
||||
func MustEvent(eventType EventType, payload any) Event {
|
||||
ev, err := NewEvent(eventType, payload)
|
||||
if err != nil {
|
||||
panic("sse.MustEvent: failed to marshal payload: " + err.Error())
|
||||
}
|
||||
return ev
|
||||
}
|
||||
|
||||
// toSSE formats this Event as a standard SSE message string ready to be
// written to a response writer. The format is:
//
//	event: <type>
//	data: <json>
//
// The marshal error is deliberately ignored: Event's fields are plainly
// marshalable, so a failure would presumably require an invalid
// pre-marshaled Payload — in that case data is empty and the client sees an
// event with no body rather than the stream crashing. TODO confirm callers
// never construct Event with hand-built Payload bytes.
func (e Event) toSSE() string {
	data, _ := json.Marshal(e)
	return "event: " + string(e.Type) + "\n" + "data: " + string(data) + "\n\n"
}
|
||||
59
backend/internal/sse/handler.go
Normal file
59
backend/internal/sse/handler.go
Normal file
@@ -0,0 +1,59 @@
|
||||
package sse
|
||||
|
||||
import (
|
||||
"net/http"
|
||||
)
|
||||
|
||||
// Handler is the HTTP handler for the GET /api/events SSE stream.
// It registers a client with the broadcaster, streams events as they arrive,
// and unregisters on disconnect.
type Handler struct {
	bc *Broadcaster // source of SSE-formatted event strings
}

// NewHandler creates a Handler backed by the given Broadcaster.
func NewHandler(bc *Broadcaster) *Handler {
	return &Handler{bc: bc}
}
|
||||
|
||||
// ServeHTTP implements the SSE streaming endpoint.
// Flusher is required; clients that do not support flushing receive a 501.
// The loop exits when the client disconnects (request context cancelled),
// a write fails, or the broadcaster closes this subscriber's channel.
func (h *Handler) ServeHTTP(w http.ResponseWriter, r *http.Request) {
	flusher, ok := w.(http.Flusher)
	if !ok {
		http.Error(w, "streaming not supported", http.StatusNotImplemented)
		return
	}

	// SSE-specific headers
	w.Header().Set("Content-Type", "text/event-stream")
	w.Header().Set("Cache-Control", "no-cache")
	w.Header().Set("Connection", "keep-alive")
	w.Header().Set("X-Accel-Buffering", "no") // Disable nginx buffering

	// Write headers immediately so the client sees the stream open.
	flusher.Flush()

	// Subscribe to the broadcaster; always unsubscribe on exit so the
	// broadcaster can remove and close this channel.
	ch := h.bc.Subscribe()
	defer h.bc.Unsubscribe(ch)

	// Use request context for cancellation when the client disconnects.
	ctx := r.Context()

	for {
		select {
		case <-ctx.Done():
			return
		case msg, ok := <-ch:
			if !ok {
				// Channel closed by the broadcaster (shutdown).
				return
			}
			_, err := w.Write([]byte(msg))
			if err != nil {
				return
			}
			// Flush after every event so messages are delivered immediately.
			flusher.Flush()
		}
	}
}
|
||||
321
backend/internal/workers/moonraker_poller.go
Normal file
321
backend/internal/workers/moonraker_poller.go
Normal file
@@ -0,0 +1,321 @@
|
||||
// Package workers provides background goroutines for printer telemetry.
|
||||
package workers
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"log/slog"
|
||||
"strconv"
|
||||
"time"
|
||||
|
||||
"github.com/CubeCraft-Creations/Extrudex/backend/internal/clients"
|
||||
"github.com/CubeCraft-Creations/Extrudex/backend/internal/models"
|
||||
"github.com/CubeCraft-Creations/Extrudex/backend/internal/repositories"
|
||||
"github.com/CubeCraft-Creations/Extrudex/backend/internal/sse"
|
||||
"github.com/jackc/pgx/v5/pgxpool"
|
||||
)
|
||||
|
||||
// MoonrakerPollerConfig controls the background polling behaviour.
type MoonrakerPollerConfig struct {
	PollInterval   time.Duration // time between poll cycles across all printers
	RequestTimeout time.Duration // per-request timeout for the Moonraker client
}

// DefaultMoonrakerPollerConfig returns sensible defaults
// (poll every 30s, 10s per-request timeout).
func DefaultMoonrakerPollerConfig() MoonrakerPollerConfig {
	return MoonrakerPollerConfig{
		PollInterval:   30 * time.Second,
		RequestTimeout: 10 * time.Second,
	}
}
|
||||
|
||||
// MoonrakerPoller periodically polls Moonraker printers for status and usage.
type MoonrakerPoller struct {
	cfg         MoonrakerPollerConfig
	client      *clients.MoonrakerClient          // shared HTTP client for all printers
	printerRepo *repositories.PrinterRepository   // source of the printer list
	jobRepo     *repositories.PrintJobRepository
	usageRepo   *repositories.UsageLogRepository
	sseBC       *sse.Broadcaster // nil-able; broadcasts are skipped when nil
	pool        *pgxpool.Pool    // direct pool access for multi-statement writes
	stop        chan struct{}    // closed by Stop to end the loop
}

// NewMoonrakerPoller creates a poller. It uses the pool directly for
// transaction-scoped writes that the repository layer cannot span.
func NewMoonrakerPoller(
	cfg MoonrakerPollerConfig,
	pool *pgxpool.Pool,
	printerRepo *repositories.PrinterRepository,
	jobRepo *repositories.PrintJobRepository,
	usageRepo *repositories.UsageLogRepository,
	sseBC *sse.Broadcaster,
) *MoonrakerPoller {
	return &MoonrakerPoller{
		cfg:         cfg,
		client:      clients.NewMoonrakerClient(cfg.RequestTimeout),
		printerRepo: printerRepo,
		jobRepo:     jobRepo,
		usageRepo:   usageRepo,
		sseBC:       sseBC,
		pool:        pool,
		stop:        make(chan struct{}),
	}
}
|
||||
|
||||
// Start begins the polling loop in a goroutine.
func (p *MoonrakerPoller) Start() {
	go p.loop()
}

// Stop signals the loop to exit.
// NOTE(review): close panics if Stop is called twice — unlike
// Broadcaster.Stop there is no sync.Once guard here; confirm callers only
// stop the poller once.
func (p *MoonrakerPoller) Stop() {
	close(p.stop)
}

// loop runs one poll cycle immediately, then one per PollInterval tick,
// until Stop is called.
func (p *MoonrakerPoller) loop() {
	ticker := time.NewTicker(p.cfg.PollInterval)
	defer ticker.Stop()

	// Immediate first tick.
	p.pollCycle()

	for {
		select {
		case <-ticker.C:
			p.pollCycle()
		case <-p.stop:
			slog.Info("moonraker poller stopped")
			return
		}
	}
}
|
||||
|
||||
// pollCycle polls every active, Moonraker-configured printer once. The whole
// cycle shares a 2-minute deadline; per-printer failures are logged and do
// not abort the remaining printers.
func (p *MoonrakerPoller) pollCycle() {
	ctx, cancel := context.WithTimeout(context.Background(), 2*time.Minute)
	defer cancel()

	printers, err := p.printerRepo.GetAll(ctx)
	if err != nil {
		slog.Error("moonraker poller: failed to list printers", "error", err)
		return
	}

	for _, printer := range printers {
		// Skip inactive printers and printers without a Moonraker URL.
		if !printer.IsActive || printer.MoonrakerURL == nil || *printer.MoonrakerURL == "" {
			continue
		}

		if err := p.pollPrinter(ctx, printer); err != nil {
			slog.Warn("moonraker poller: poll failed",
				"printer", printer.Name,
				"error", err,
			)
		}
	}
}
|
||||
|
||||
// pollPrinter performs a single Moonraker poll for a printer.
|
||||
func (p *MoonrakerPoller) pollPrinter(ctx context.Context, printer models.Printer) error {
|
||||
host := *printer.MoonrakerURL
|
||||
var apiKey string
|
||||
if printer.MoonrakerAPIKey != nil {
|
||||
apiKey = *printer.MoonrakerAPIKey
|
||||
}
|
||||
|
||||
// Fetch printer info (status)
|
||||
info, err := p.client.GetPrinterInfo(ctx, host, 80, apiKey)
|
||||
if err != nil {
|
||||
p.broadcastStatus(printer.ID, printer.Name, "offline")
|
||||
return err
|
||||
}
|
||||
|
||||
status := mapMoonrakerState(info.State)
|
||||
p.broadcastStatus(printer.ID, printer.Name, status)
|
||||
|
||||
// Fetch print stats
|
||||
stats, err := p.client.GetPrintStats(ctx, host, 80, apiKey)
|
||||
if err != nil {
|
||||
return fmt.Errorf("getPrintStats failed: %w", err)
|
||||
}
|
||||
|
||||
if status == "printing" && stats.Filename != "" {
|
||||
p.broadcastJobStarted(printer.ID, stats.Filename)
|
||||
}
|
||||
|
||||
if isCompleteState(stats.State) && stats.FilamentUsedMm > 0 {
|
||||
// Record usage
|
||||
if err := p.recordUsage(ctx, printer, stats); err != nil {
|
||||
slog.Error("moonraker poller: record usage failed",
|
||||
"printer", printer.Name, "error", err)
|
||||
} else {
|
||||
p.broadcastJobCompleted(printer.ID, stats.Filename, stats.FilamentUsedMm)
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// recordUsage persists a completed print as a print_jobs row plus a
// usage_logs row, attributing the filament to the spool most recently used
// on this printer.
//
// NOTE(review): the two inserts are not wrapped in a transaction — a failure
// on the usage_logs insert leaves an orphan print_jobs row; confirm whether
// that is acceptable or should use pool.Begin.
func (p *MoonrakerPoller) recordUsage(ctx context.Context, printer models.Printer, stats *clients.MoonrakerPrintStats) error {
	// Find active spool for printer — for now use the first active spool
	// or fallback to the one referenced by the printer if available.
	// In a real scenario we'd query AMS slots or fallback logic.
	// Here we simply look for the most recently used spool in usage_logs.
	var spoolID int
	row := p.pool.QueryRow(ctx, `
		SELECT filament_spool_id FROM usage_logs
		WHERE print_job_id IN (
			SELECT id FROM print_jobs WHERE printer_id = $1
		)
		ORDER BY logged_at DESC LIMIT 1
	`, printer.ID)
	// Scan error (e.g. no rows) is deliberately ignored: spoolID stays 0 and
	// the guard below skips recording.
	_ = row.Scan(&spoolID)

	if spoolID == 0 {
		// No prior usage — skip recording (no known spool to deduct from)
		slog.Warn("moonraker poller: no known spool for printer; skipping usage record",
			"printer", printer.Name)
		return nil
	}

	// Compute grams from mm extruded using defaults (1.75mm diameter, PLA density 1.24)
	grams := calculateGrams(stats.FilamentUsedMm, 1.75, 1.24)

	// Create a print job record. started_at is back-computed from the total
	// duration; job_status_id 4 is hard-coded — presumably "completed" in
	// the job_statuses seed data, TODO confirm against the seed migration.
	var jobID int
	err := p.pool.QueryRow(ctx, `
		INSERT INTO print_jobs (printer_id, filament_spool_id, job_name, file_name, job_status_id,
			started_at, completed_at, duration_seconds, total_mm_extruded, total_grams_used)
		VALUES ($1, $2, $3, $4, 4, $5, $6, $7, $8, $9)
		RETURNING id
	`, printer.ID, spoolID, stats.Filename, stats.Filename,
		time.Now().Add(-time.Duration(stats.TotalDuration)*time.Second),
		time.Now(),
		int(stats.TotalDuration),
		stats.FilamentUsedMm,
		grams,
	).Scan(&jobID)

	if err != nil {
		return fmt.Errorf("insert print_job failed: %w", err)
	}

	// Create usage_log
	_, err = p.pool.Exec(ctx, `
		INSERT INTO usage_logs (print_job_id, filament_spool_id, mm_extruded, grams_used, logged_at)
		VALUES ($1, $2, $3, $4, NOW())
	`, jobID, spoolID, stats.FilamentUsedMm, grams)

	if err != nil {
		return fmt.Errorf("insert usage_log failed: %w", err)
	}

	slog.Info("moonraker poller: recorded usage",
		"printer", printer.Name,
		"job", stats.Filename,
		"mm", stats.FilamentUsedMm,
		"grams", grams,
	)
	return nil
}
|
||||
|
||||
// broadcastStatus publishes a printer.status SSE event; a nil broadcaster
// (e.g. in tests) makes this a no-op.
func (p *MoonrakerPoller) broadcastStatus(printerID int, name, status string) {
	if p.sseBC == nil {
		return
	}
	ev, err := sse.NewEvent(sse.EventPrinterStatus, sse.PrinterStatusPayload{
		PrinterID:   printerID,
		PrinterName: name,
		Status:      status,
	})
	if err != nil {
		// Marshal failure on a plain struct is not actionable; drop the event.
		return
	}
	p.sseBC.Publish(ev)
}

// broadcastJobStarted publishes a job.started SSE event. JobID is left at
// its zero value because no print_jobs row exists yet at start time.
func (p *MoonrakerPoller) broadcastJobStarted(printerID int, jobName string) {
	if p.sseBC == nil {
		return
	}
	ev, err := sse.NewEvent(sse.EventJobStarted, sse.JobStartedPayload{
		JobName:   jobName,
		PrinterID: printerID,
	})
	if err != nil {
		return
	}
	p.sseBC.Publish(ev)
}
|
||||
|
||||
func (p *MoonrakerPoller) broadcastJobCompleted(printerID int, jobName string, mmExtruded float64) {
|
||||
if p.sseBC == nil {
|
||||
return
|
||||
}
|
||||
grams := calculateGrams(mmExtruded, 1.75, 1.24)
|
||||
gramsInt := int(grams)
|
||||
ev, err := sse.NewEvent(sse.EventJobCompleted, sse.JobCompletedPayload{
|
||||
JobName: jobName,
|
||||
PrinterID: printerID,
|
||||
TotalGramsUsed: &gramsInt,
|
||||
})
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
p.sseBC.Publish(ev)
|
||||
}
|
||||
|
||||
// mapMoonrakerState translates a raw Moonraker printer state into the status
// vocabulary used by Extrudex SSE events. Unknown states map to "offline".
func mapMoonrakerState(state string) string {
	translations := map[string]string{
		"printing":  "printing",
		"paused":    "paused",
		"complete":  "idle",
		"standby":   "idle",
		"cancelled": "idle",
		"error":     "error",
	}
	if mapped, known := translations[state]; known {
		return mapped
	}
	return "offline"
}
|
||||
|
||||
// isCompleteState reports whether a Moonraker print state represents a
// finished job ("complete" per Moonraker; "completed" accepted defensively).
func isCompleteState(state string) bool {
	switch state {
	case "complete", "completed":
		return true
	}
	return false
}
|
||||
|
||||
// calculateGrams converts a length of extruded filament (mm) into mass (g)
// for round filament of the given diameter (mm) and density (g/cm³).
// Non-positive lengths yield 0.
func calculateGrams(mmExtruded, diameterMm, densityGcm3 float64) float64 {
	if mmExtruded <= 0 {
		return 0
	}
	const pi = 3.141592653589793
	// Work in cm so the volume comes out in cm³, matching the density unit.
	radiusCm := diameterMm / 2.0 / 10.0
	crossSectionCm2 := pi * radiusCm * radiusCm
	lengthCm := mmExtruded / 10.0
	return lengthCm * crossSectionCm2 * densityGcm3
}
|
||||
|
||||
// ---------------------------------------------------------------------------
// Helper for port parsing (Moonraker URL may contain port)
// ---------------------------------------------------------------------------

// extractHostPort splits an optional ":port" suffix off a Moonraker URL,
// returning (host, port). When no numeric port suffix is present — or the
// string is empty — the port defaults to 80.
//
// The scan walks backwards and stops at the first '/', so a path component
// is never mistaken for a port.
func extractHostPort(rawURL string) (string, int) {
	if rawURL == "" {
		return "", 80
	}
	for idx := len(rawURL) - 1; idx >= 0; idx-- {
		ch := rawURL[idx]
		if ch == '/' {
			break
		}
		if ch != ':' {
			continue
		}
		if port, convErr := strconv.Atoi(rawURL[idx+1:]); convErr == nil {
			return rawURL[:idx], port
		}
		break
	}
	return rawURL, 80
}
|
||||
223
backend/internal/workers/mqtt_subscriber.go
Normal file
223
backend/internal/workers/mqtt_subscriber.go
Normal file
@@ -0,0 +1,223 @@
|
||||
// Package workers provides background goroutines for printer telemetry.
|
||||
package workers
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"log/slog"
|
||||
"time"
|
||||
|
||||
"github.com/CubeCraft-Creations/Extrudex/backend/internal/clients"
|
||||
"github.com/CubeCraft-Creations/Extrudex/backend/internal/models"
|
||||
"github.com/CubeCraft-Creations/Extrudex/backend/internal/repositories"
|
||||
"github.com/CubeCraft-Creations/Extrudex/backend/internal/sse"
|
||||
"github.com/jackc/pgx/v5/pgxpool"
|
||||
)
|
||||
|
||||
// MQTTSubscriberConfig controls MQTT background worker behaviour.
type MQTTSubscriberConfig struct {
	ReconnectInterval time.Duration // how often connectAll re-checks printers
}

// DefaultMQTTSubscriberConfig returns sensible defaults (reconnect every 30s).
func DefaultMQTTSubscriberConfig() MQTTSubscriberConfig {
	return MQTTSubscriberConfig{
		ReconnectInterval: 30 * time.Second,
	}
}

// MQTTSubscriber manages per-printer MQTT connections and telemetry ingestion.
// The clients map is only touched from the loop goroutine (via connectAll and
// the shutdown branch), so no mutex is used.
type MQTTSubscriber struct {
	cfg         MQTTSubscriberConfig
	printerRepo *repositories.PrinterRepository
	usageRepo   *repositories.UsageLogRepository
	sseBC       *sse.Broadcaster           // nil-able; broadcasts skipped when nil
	pool        *pgxpool.Pool
	clients     map[int]*clients.MQTTClient // keyed by printer ID
	stop        chan struct{}               // closed by Stop to end the loop
}

// NewMQTTSubscriber creates a new subscriber worker.
func NewMQTTSubscriber(
	cfg MQTTSubscriberConfig,
	pool *pgxpool.Pool,
	printerRepo *repositories.PrinterRepository,
	usageRepo *repositories.UsageLogRepository,
	sseBC *sse.Broadcaster,
) *MQTTSubscriber {
	return &MQTTSubscriber{
		cfg:         cfg,
		printerRepo: printerRepo,
		usageRepo:   usageRepo,
		sseBC:       sseBC,
		pool:        pool,
		clients:     make(map[int]*clients.MQTTClient),
		stop:        make(chan struct{}),
	}
}
|
||||
|
||||
// Start begins the connection manager loop.
func (s *MQTTSubscriber) Start() {
	go s.loop()
}

// Stop signals the loop to exit; the loop itself disconnects all clients.
// NOTE(review): close panics if Stop is called twice — no sync.Once guard
// here; confirm callers only stop the subscriber once.
func (s *MQTTSubscriber) Stop() {
	close(s.stop)
}

// loop attempts an initial connect, then re-runs connectAll every
// ReconnectInterval to pick up new printers and retry failed brokers.
// On stop, each live client is disconnected with a 1000ms quiesce.
func (s *MQTTSubscriber) loop() {
	// Initial connect attempt.
	s.connectAll()

	ticker := time.NewTicker(s.cfg.ReconnectInterval)
	defer ticker.Stop()

	for {
		select {
		case <-ticker.C:
			s.connectAll()
		case <-s.stop:
			slog.Info("mqtt subscriber stopped")
			for _, c := range s.clients {
				c.Disconnect(1000)
			}
			return
		}
	}
}
|
||||
|
||||
// connectAll reconciles MQTT connections with the current printer list:
// connects new active printers, drops printers that became inactive or lost
// their broker config, and leaves existing connections untouched. Failures
// are logged per printer and retried on the next tick.
func (s *MQTTSubscriber) connectAll() {
	ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second)
	defer cancel()

	printers, err := s.printerRepo.GetAll(ctx)
	if err != nil {
		slog.Error("mqtt subscriber: failed to list printers", "error", err)
		return
	}

	for _, printer := range printers {
		if !printer.IsActive || printer.MQTTBrokerHost == nil || *printer.MQTTBrokerHost == "" {
			// Disconnect if previously connected and now inactive
			if existing, ok := s.clients[printer.ID]; ok {
				existing.Disconnect(1000)
				delete(s.clients, printer.ID)
			}
			continue
		}

		if _, ok := s.clients[printer.ID]; ok {
			// Already connected — skip
			continue
		}

		topicPrefix := ""
		if printer.MQTTTopicPrefix != nil {
			topicPrefix = *printer.MQTTTopicPrefix
		}

		cfg := clients.MQTTConfig{
			BrokerHost:  *printer.MQTTBrokerHost,
			TopicPrefix: topicPrefix,
			TLSEnabled:  printer.MQTTTLSEnabled,
			ClientID:    fmt.Sprintf("extrudex-printer-%d", printer.ID),
		}

		c, err := clients.NewMQTTClient(cfg)
		if err != nil {
			// Connection failure is non-fatal; retried on the next tick.
			slog.Warn("mqtt subscriber: connect failed",
				"printer", printer.Name,
				"broker", cfg.BrokerHost,
				"error", err,
			)
			continue
		}

		s.clients[printer.ID] = c

		// Subscribe to telemetry topics; a failed subscribe on one topic
		// does not prevent the others.
		topics := clients.DefaultBambuTopics(topicPrefix)
		for _, topic := range topics {
			if err := c.Subscribe(topic, 0, s.makeHandler(printer)); err != nil {
				slog.Warn("mqtt subscriber: subscribe failed",
					"printer", printer.Name,
					"topic", topic,
					"error", err,
				)
			}
		}

		slog.Info("mqtt subscriber: connected",
			"printer", printer.Name,
			"broker", cfg.BrokerHost,
			"topics", topics,
		)
	}
}
|
||||
|
||||
// makeHandler returns the message callback for one printer. The printer
// value is captured by the closure, so each connection gets its own handler.
func (s *MQTTSubscriber) makeHandler(printer models.Printer) func([]byte) {
	return func(payload []byte) {
		slog.Debug("mqtt subscriber: message received",
			"printer", printer.Name,
			"size", len(payload),
		)

		// Attempt Bambu Lab parse; non-Bambu payloads are silently dropped.
		telemetry, err := clients.ParseBambuTelemetry(payload)
		if err != nil {
			slog.Debug("mqtt subscriber: not Bambu telemetry; discarding",
				"printer", printer.Name, "error", err)
			return
		}

		// Determine status from telemetry: any positive stage means printing.
		status := "idle"
		if telemetry.Print.Stage > 0 {
			status = "printing"
		}
		s.broadcastStatus(printer.ID, printer.Name, status)

		// If a print just completed, record usage when we see a completed event.
		// Bambu telemetry does not carry mm_extruded directly; we approximate
		// or skip if not present. Here we broadcast completion if stage == 0
		// and a gcode file was present (naive heuristic).
		// NOTE(review): this fires on EVERY idle message that still carries a
		// gcode filename, so one completed print may be broadcast repeatedly —
		// confirm whether downstream consumers deduplicate.
		if telemetry.Print.GcodeFile != "" && telemetry.Print.Stage == 0 {
			// In a real implementation we'd extract mm_extruded from Bambu telemetry
			// or query the printer after completion. For now broadcast completion.
			s.broadcastJobCompleted(printer.ID, telemetry.Print.GcodeFile, 0)
		}
	}
}
|
||||
|
||||
// broadcastStatus publishes a printer.status SSE event; a nil broadcaster
// (e.g. in tests) makes this a no-op.
func (s *MQTTSubscriber) broadcastStatus(printerID int, name, status string) {
	if s.sseBC == nil {
		return
	}
	ev, err := sse.NewEvent(sse.EventPrinterStatus, sse.PrinterStatusPayload{
		PrinterID:   printerID,
		PrinterName: name,
		Status:      status,
	})
	if err != nil {
		// Marshal failure on a plain struct is not actionable; drop the event.
		return
	}
	s.sseBC.Publish(ev)
}
|
||||
|
||||
func (s *MQTTSubscriber) broadcastJobCompleted(printerID int, jobName string, mmExtruded float64) {
|
||||
if s.sseBC == nil {
|
||||
return
|
||||
}
|
||||
grams := calculateGrams(mmExtruded, 1.75, 1.24)
|
||||
gramsInt := int(grams)
|
||||
ev, err := sse.NewEvent(sse.EventJobCompleted, sse.JobCompletedPayload{
|
||||
JobName: jobName,
|
||||
PrinterID: printerID,
|
||||
TotalGramsUsed: &gramsInt,
|
||||
})
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
s.sseBC.Publish(ev)
|
||||
}
|
||||
19
backend/migrations/000001_initial_schema.down.sql
Normal file
19
backend/migrations/000001_initial_schema.down.sql
Normal file
@@ -0,0 +1,19 @@
|
||||
-- Migration: 000001_initial_schema (rollback)
-- Description: Drop all tables and indexes created in the initial schema migration
-- Author: Hex
-- Date: 2026-05-06
--
-- Tables are dropped child-first (usage_logs -> print_jobs -> spools -> parents)
-- so the order is valid even without CASCADE; CASCADE is kept as a guard
-- against dependent objects added by later migrations.

BEGIN;

DROP TABLE IF EXISTS usage_logs CASCADE;
DROP TABLE IF EXISTS print_jobs CASCADE;
DROP TABLE IF EXISTS filament_spools CASCADE;
DROP TABLE IF EXISTS printers CASCADE;
DROP TABLE IF EXISTS settings CASCADE;
DROP TABLE IF EXISTS material_modifiers CASCADE;
DROP TABLE IF EXISTS material_finishes CASCADE;
DROP TABLE IF EXISTS material_bases CASCADE;
DROP TABLE IF EXISTS job_statuses CASCADE;
DROP TABLE IF EXISTS printer_types CASCADE;

COMMIT;
|
||||
231
backend/migrations/000001_initial_schema.up.sql
Normal file
231
backend/migrations/000001_initial_schema.up.sql
Normal file
@@ -0,0 +1,231 @@
|
||||
-- Migration: 000001_initial_schema
|
||||
-- Description: Create initial Extrudex schema — lookup tables, core entities, and settings
|
||||
-- Author: Hex
|
||||
-- Date: 2026-05-06
|
||||
--
|
||||
-- Design decisions:
|
||||
-- - Lookup tables for material_base, material_finish, material_modifier (no free-text enums)
|
||||
-- - Lookup tables for printer_type and job_status (extensible, no hard-coded enum values)
|
||||
-- - FK ON DELETE: RESTRICT on critical parents (material_base, material_finish, printer),
|
||||
-- SET NULL on optional parents (modifier, spool on print_jobs),
|
||||
-- CASCADE for usage_logs when parent job is deleted
|
||||
-- - Soft-delete (deleted_at) on spools and print_jobs for safety
|
||||
-- - JSONB config column on settings for flexible app-wide configuration
|
||||
-- - All identifiers snake_case per project convention
|
||||
|
||||
BEGIN;
|
||||
|
||||
-- ============================================================================
|
||||
-- Lookup Tables
|
||||
-- ============================================================================
|
||||
|
||||
-- Printer types (fdm, resin, etc.) — extensible, not a raw enum
|
||||
CREATE TABLE printer_types (
|
||||
id SERIAL PRIMARY KEY,
|
||||
name VARCHAR(50) NOT NULL UNIQUE,
|
||||
created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
|
||||
updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
|
||||
);
|
||||
|
||||
-- Job statuses (pending, printing, paused, completed, failed, cancelled)
|
||||
CREATE TABLE job_statuses (
|
||||
id SERIAL PRIMARY KEY,
|
||||
name VARCHAR(50) NOT NULL UNIQUE,
|
||||
created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
|
||||
updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
|
||||
);
|
||||
|
||||
-- Material base types (PLA, PETG, ABS, TPU, ASA, Nylon, PC)
|
||||
CREATE TABLE material_bases (
|
||||
id SERIAL PRIMARY KEY,
|
||||
name VARCHAR(100) NOT NULL UNIQUE,
|
||||
density_g_cm3 DECIMAL(5,3) NOT NULL,
|
||||
extrusion_temp_min INT,
|
||||
extrusion_temp_max INT,
|
||||
bed_temp_min INT,
|
||||
bed_temp_max INT,
|
||||
created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
|
||||
updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
|
||||
);
|
||||
|
||||
-- Material finishes (Basic, Silk, Matte, Glossy, Satin)
|
||||
CREATE TABLE material_finishes (
|
||||
id SERIAL PRIMARY KEY,
|
||||
name VARCHAR(100) NOT NULL UNIQUE,
|
||||
description TEXT,
|
||||
created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
|
||||
updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
|
||||
);
|
||||
|
||||
-- Material modifiers (Wood-Filled, Carbon Fiber, Glow-in-Dark, Marble)
|
||||
CREATE TABLE material_modifiers (
|
||||
id SERIAL PRIMARY KEY,
|
||||
name VARCHAR(100) NOT NULL UNIQUE,
|
||||
description TEXT,
|
||||
created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
|
||||
updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
|
||||
);
|
||||
|
||||
-- ============================================================================
|
||||
-- Core Entity Tables
|
||||
-- ============================================================================
|
||||
|
||||
-- 3D printers in the fleet
|
||||
CREATE TABLE printers (
|
||||
id SERIAL PRIMARY KEY,
|
||||
name VARCHAR(255) NOT NULL,
|
||||
printer_type_id INT NOT NULL,
|
||||
manufacturer VARCHAR(255),
|
||||
model VARCHAR(255),
|
||||
moonraker_url VARCHAR(512),
|
||||
moonraker_api_key VARCHAR(512),
|
||||
mqtt_broker_host VARCHAR(255),
|
||||
mqtt_topic_prefix VARCHAR(255),
|
||||
mqtt_tls_enabled BOOLEAN NOT NULL DEFAULT FALSE,
|
||||
is_active BOOLEAN NOT NULL DEFAULT TRUE,
|
||||
created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
|
||||
updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
|
||||
|
||||
CONSTRAINT fk_printers_printer_type
|
||||
FOREIGN KEY (printer_type_id) REFERENCES printer_types(id)
|
||||
ON DELETE RESTRICT
|
||||
);
|
||||
|
||||
-- Filament spools — the core inventory item
|
||||
CREATE TABLE filament_spools (
|
||||
id SERIAL PRIMARY KEY,
|
||||
name VARCHAR(255) NOT NULL,
|
||||
material_base_id INT NOT NULL,
|
||||
material_finish_id INT NOT NULL DEFAULT 1, -- "Basic" (seed data populates this first)
|
||||
material_modifier_id INT,
|
||||
color_hex VARCHAR(7) NOT NULL CHECK (color_hex ~ '^#[0-9A-Fa-f]{6}$'),
|
||||
brand VARCHAR(255),
|
||||
diameter_mm DECIMAL(4,2) NOT NULL DEFAULT 1.75,
|
||||
initial_grams INT NOT NULL CHECK (initial_grams > 0),
|
||||
remaining_grams INT NOT NULL CHECK (remaining_grams >= 0),
|
||||
spool_weight_grams INT, -- measured empty-spool weight (tare), nullable
|
||||
cost_usd DECIMAL(10,2),
|
||||
low_stock_threshold_grams INT NOT NULL DEFAULT 50,
|
||||
notes TEXT,
|
||||
barcode VARCHAR(255) UNIQUE,
|
||||
deleted_at TIMESTAMPTZ,
|
||||
created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
|
||||
updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
|
||||
|
||||
CONSTRAINT fk_spools_material_base
|
||||
FOREIGN KEY (material_base_id) REFERENCES material_bases(id)
|
||||
ON DELETE RESTRICT,
|
||||
|
||||
CONSTRAINT fk_spools_material_finish
|
||||
FOREIGN KEY (material_finish_id) REFERENCES material_finishes(id)
|
||||
ON DELETE RESTRICT,
|
||||
|
||||
CONSTRAINT fk_spools_material_modifier
|
||||
FOREIGN KEY (material_modifier_id) REFERENCES material_modifiers(id)
|
||||
ON DELETE SET NULL
|
||||
);
|
||||
|
||||
-- Print jobs — each job is one print on one printer
CREATE TABLE print_jobs (
    id SERIAL PRIMARY KEY,
    printer_id INT NOT NULL,
    filament_spool_id INT, -- nullable: a job may use multiple spools (captured in usage_logs)
    job_name VARCHAR(255) NOT NULL,
    file_name VARCHAR(512),
    job_status_id INT NOT NULL DEFAULT 1, -- "pending"
    started_at TIMESTAMPTZ,
    completed_at TIMESTAMPTZ,
    duration_seconds INT,           -- actual duration once the job finishes
    estimated_duration_seconds INT, -- pre-print estimate; may differ from actual
    total_mm_extruded DECIMAL(12,2),
    total_grams_used DECIMAL(10,2),
    total_cost_usd DECIMAL(10,4),
    notes TEXT,
    deleted_at TIMESTAMPTZ, -- soft delete: NULL means the job is visible
    created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
    updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),

    -- RESTRICT: a printer with job history cannot be hard-deleted
    CONSTRAINT fk_print_jobs_printer
        FOREIGN KEY (printer_id) REFERENCES printers(id)
        ON DELETE RESTRICT,

    -- SET NULL: removing a spool keeps the job but clears the link
    CONSTRAINT fk_print_jobs_spool
        FOREIGN KEY (filament_spool_id) REFERENCES filament_spools(id)
        ON DELETE SET NULL,

    -- RESTRICT: statuses are reference data and must not disappear under jobs
    CONSTRAINT fk_print_jobs_status
        FOREIGN KEY (job_status_id) REFERENCES job_statuses(id)
        ON DELETE RESTRICT
);
|
||||
|
||||
-- Usage logs — granular tracking of filament consumed per job, per spool
CREATE TABLE usage_logs (
    id SERIAL PRIMARY KEY,
    print_job_id INT NOT NULL,
    filament_spool_id INT NOT NULL,
    mm_extruded DECIMAL(12,2) NOT NULL CHECK (mm_extruded > 0),
    grams_used DECIMAL(10,2) NOT NULL CHECK (grams_used > 0),
    cost_usd DECIMAL(10,4),
    logged_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), -- event timestamp (defaults to insert time)
    created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), -- row-audit timestamp

    -- CASCADE: usage rows are meaningless without their parent job
    CONSTRAINT fk_usage_logs_print_job
        FOREIGN KEY (print_job_id) REFERENCES print_jobs(id)
        ON DELETE CASCADE,

    -- RESTRICT: a spool with recorded usage cannot be hard-deleted
    CONSTRAINT fk_usage_logs_spool
        FOREIGN KEY (filament_spool_id) REFERENCES filament_spools(id)
        ON DELETE RESTRICT
);
|
||||
|
||||
-- ============================================================================
-- Application Settings
-- ============================================================================

-- Key/value store for app-wide configuration. Values are JSONB so one table
-- can hold numbers, strings, and structured settings alike.
CREATE TABLE settings (
    id SERIAL PRIMARY KEY,
    key VARCHAR(255) NOT NULL UNIQUE, -- UNIQUE also backs key lookups with an index
    value JSONB NOT NULL,
    description TEXT,
    created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
    updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
);
|
||||
|
||||
-- ============================================================================
-- Indexes
-- ============================================================================

-- Filament spools — query patterns: lookup by material, low-stock scans, barcode scans
CREATE INDEX ix_spools_material_base_id ON filament_spools(material_base_id);
CREATE INDEX ix_spools_material_finish_id ON filament_spools(material_finish_id);
CREATE INDEX ix_spools_material_modifier_id ON filament_spools(material_modifier_id);
CREATE INDEX ix_spools_remaining_grams ON filament_spools(remaining_grams)
    WHERE deleted_at IS NULL; -- partial index: only active spools for low-stock queries
CREATE INDEX ix_spools_barcode ON filament_spools(barcode)
    WHERE barcode IS NOT NULL AND deleted_at IS NULL;
CREATE INDEX ix_spools_deleted_at ON filament_spools(deleted_at)
    WHERE deleted_at IS NOT NULL; -- small index for soft-delete filtering

-- Printers
CREATE INDEX ix_printers_printer_type_id ON printers(printer_type_id);
CREATE INDEX ix_printers_is_active ON printers(is_active)
    WHERE is_active = TRUE; -- partial index for fleet dashboard queries

-- Print jobs — query by printer, status, date range, and soft-delete filter
CREATE INDEX ix_print_jobs_printer_id ON print_jobs(printer_id);
CREATE INDEX ix_print_jobs_spool_id ON print_jobs(filament_spool_id)
    WHERE filament_spool_id IS NOT NULL;
CREATE INDEX ix_print_jobs_status_id ON print_jobs(job_status_id);
CREATE INDEX ix_print_jobs_created_at ON print_jobs(created_at DESC);
CREATE INDEX ix_print_jobs_deleted_at ON print_jobs(deleted_at)
    WHERE deleted_at IS NOT NULL;

-- Usage logs — always queried by job or spool
CREATE INDEX ix_usage_logs_print_job_id ON usage_logs(print_job_id);
CREATE INDEX ix_usage_logs_spool_id ON usage_logs(filament_spool_id);
CREATE INDEX ix_usage_logs_logged_at ON usage_logs(logged_at DESC);

-- Settings — no explicit index: settings.key is declared UNIQUE, and PostgreSQL
-- automatically backs a UNIQUE constraint with a b-tree index, so a separate
-- ix_settings_key would duplicate that index and only add write overhead.

COMMIT;
|
||||
15
backend/migrations/000002_seed_data.down.sql
Normal file
15
backend/migrations/000002_seed_data.down.sql
Normal file
@@ -0,0 +1,15 @@
|
||||
-- Migration: 000002_seed_data (rollback)
-- Description: Remove seed data inserted in 000002
-- Author: Hex
-- Date: 2026-05-06
--
-- Rows are matched only by the explicit IDs/keys the up-migration inserted,
-- so user-created rows survive the rollback untouched.
-- NOTE(review): the sequences advanced by setval() in the up-migration are
-- not reset here; after rollback they retain their advanced values — confirm
-- this is acceptable for the migration tooling in use.

BEGIN;

DELETE FROM settings WHERE key IN ('default_low_stock_threshold_grams', 'default_diameter_mm', 'filament_cross_section_area_mm2');
DELETE FROM material_modifiers WHERE id IN (1, 2, 3, 4);
DELETE FROM material_finishes WHERE id IN (1, 2, 3, 4, 5);
DELETE FROM material_bases WHERE id IN (1, 2, 3, 4, 5, 6, 7);
DELETE FROM job_statuses WHERE id IN (1, 2, 3, 4, 5, 6);
DELETE FROM printer_types WHERE id IN (1, 2);

COMMIT;
|
||||
95
backend/migrations/000002_seed_data.up.sql
Normal file
95
backend/migrations/000002_seed_data.up.sql
Normal file
@@ -0,0 +1,95 @@
|
||||
-- Seed Data: Extrudex common reference data
-- Author: Hex
-- Date: 2026-05-06
--
-- IMPORTANT: IDs are explicitly assigned to satisfy the DEFAULT constraints:
--   - filament_spools.material_finish_id DEFAULT 1 ("Basic")
--   - print_jobs.job_status_id DEFAULT 1 ("pending")
--
-- Density values sourced from common manufacturer specifications.
-- Temperature ranges are conservative/typical; users can override per-spool.
--
-- Idempotency: every INSERT carries ON CONFLICT DO NOTHING so the migration
-- can be re-applied safely, and each setval() advances the sequence past the
-- explicit IDs so later application inserts never collide with seeded rows.

BEGIN;

-- ============================================================================
-- Printer Types
-- ============================================================================

INSERT INTO printer_types (id, name) VALUES
    (1, 'fdm'),
    (2, 'resin')
ON CONFLICT (id) DO NOTHING;

-- Reset the sequence so future inserts start after our explicit IDs
SELECT setval('printer_types_id_seq', GREATEST(2, (SELECT MAX(id) FROM printer_types)));

-- ============================================================================
-- Job Statuses
-- ============================================================================

INSERT INTO job_statuses (id, name) VALUES
    (1, 'pending'),
    (2, 'printing'),
    (3, 'paused'),
    (4, 'completed'),
    (5, 'failed'),
    (6, 'cancelled')
ON CONFLICT (id) DO NOTHING;

SELECT setval('job_statuses_id_seq', GREATEST(6, (SELECT MAX(id) FROM job_statuses)));

-- ============================================================================
-- Material Bases (common filament types)
-- ============================================================================

-- Columns: density (g/cm³), extrusion temp range (°C), bed temp range (°C).
-- NOTE(review): presumably bed_temp_min = 0 means an unheated bed is fine — confirm.
INSERT INTO material_bases (id, name, density_g_cm3, extrusion_temp_min, extrusion_temp_max, bed_temp_min, bed_temp_max) VALUES
    (1, 'PLA', 1.24, 190, 220, 0, 60),
    (2, 'PETG', 1.27, 230, 250, 70, 90),
    (3, 'ABS', 1.04, 230, 260, 90, 110),
    (4, 'TPU', 1.21, 220, 250, 0, 60),
    (5, 'ASA', 1.07, 240, 260, 90, 110),
    (6, 'Nylon', 1.14, 240, 280, 70, 100),
    (7, 'PC', 1.20, 260, 310, 90, 120)
ON CONFLICT (id) DO NOTHING;

SELECT setval('material_bases_id_seq', GREATEST(7, (SELECT MAX(id) FROM material_bases)));

-- ============================================================================
-- Material Finishes
-- ============================================================================
-- ID 1 = "Basic" is the default for new spools (DEFAULT 1 constraint)

INSERT INTO material_finishes (id, name, description) VALUES
    (1, 'Basic', 'Standard solid-color filament with no special finish'),
    (2, 'Silk', 'Glossy silk-like sheen, often used for decorative prints'),
    (3, 'Matte', 'Flat non-reflective surface finish'),
    (4, 'Glossy', 'High-shine reflective surface'),
    (5, 'Satin', 'Semi-gloss between matte and glossy')
ON CONFLICT (id) DO NOTHING;

SELECT setval('material_finishes_id_seq', GREATEST(5, (SELECT MAX(id) FROM material_finishes)));

-- ============================================================================
-- Material Modifiers
-- ============================================================================

INSERT INTO material_modifiers (id, name, description) VALUES
    (1, 'Wood-Filled', 'Contains wood fibers for natural wood-like appearance and texture'),
    (2, 'Carbon Fiber', 'Reinforced with carbon fibers for increased stiffness and strength'),
    (3, 'Glow-in-Dark', 'Phosphorescent additive that glows after exposure to light'),
    (4, 'Marble', 'Contains specks for a stone-like marble appearance')
ON CONFLICT (id) DO NOTHING;

SELECT setval('material_modifiers_id_seq', GREATEST(4, (SELECT MAX(id) FROM material_modifiers)));

-- ============================================================================
-- Default Application Settings
-- ============================================================================

-- settings.key is UNIQUE, so conflicts here target the key column, not id
INSERT INTO settings (key, value, description) VALUES
    ('default_low_stock_threshold_grams', '50', 'Default grams threshold for low-stock alerts on new spools'),
    ('default_diameter_mm', '1.75', 'Default filament diameter for new spools (1.75mm is the modern standard)'),
    ('filament_cross_section_area_mm2', '2.405', 'Cross-sectional area for 1.75mm filament: π × (1.75/2)²')
ON CONFLICT (key) DO NOTHING;

COMMIT;
|
||||
34
deploy.sh
Executable file
34
deploy.sh
Executable file
@@ -0,0 +1,34 @@
|
||||
#!/bin/bash
# deploy.sh — build and start the Extrudex dev stack with Docker Compose.
# Exits on first error (set -e) and supports both the standalone
# docker-compose binary and the "docker compose" plugin.
set -e

echo "🔧 Deploying Extrudex Docker runtime..."

# Check if Docker Compose is available in either form
if ! command -v docker-compose &> /dev/null && ! docker compose version &> /dev/null; then
    echo "❌ Docker Compose is not installed"
    exit 1
fi

# Prefer the standalone binary when present, otherwise use the plugin
COMPOSE_CMD="docker compose"
if command -v docker-compose &> /dev/null; then
    COMPOSE_CMD="docker-compose"
fi

echo "📦 Building and starting services..."
$COMPOSE_CMD -f docker-compose.dev.yml up -d --build

echo "⏳ Waiting for services to become healthy..."
# FIX: a blind `sleep 15` reported "Deployment complete" even when the API
# never came up. Poll the API health endpoint (host port 5080 per
# docker-compose.dev.yml) for up to 60s; fall back to the old fixed wait
# only when curl is unavailable on the host.
if command -v curl &> /dev/null; then
    healthy=0
    for _ in $(seq 1 30); do
        if curl -fs http://localhost:5080/health > /dev/null 2>&1; then
            healthy=1
            break
        fi
        sleep 2
    done
    if [ "$healthy" -ne 1 ]; then
        echo "❌ API did not become healthy within 60s"
        echo "   Inspect logs with: $COMPOSE_CMD -f docker-compose.dev.yml logs -f"
        exit 1
    fi
else
    sleep 15
fi

echo "✅ Deployment complete!"
echo ""
echo "Services running:"
echo " • PostgreSQL: localhost:5433"
echo " • Extrudex API: http://localhost:5080"
echo " • Extrudex Web: http://localhost:5081"
echo ""
echo "To view logs:"
echo " $COMPOSE_CMD -f docker-compose.dev.yml logs -f"
echo ""
echo "To stop:"
echo " $COMPOSE_CMD -f docker-compose.dev.yml down"
|
||||
70
docker-compose.dev.yml
Normal file
70
docker-compose.dev.yml
Normal file
@@ -0,0 +1,70 @@
|
||||
# Dev stack for Extrudex: PostgreSQL + API + web frontend on one bridge network.
services:
  extrudex-db:
    image: postgres:16-alpine
    container_name: extrudex-db
    environment:
      POSTGRES_USER: extrudex
      POSTGRES_PASSWORD: changeme   # dev-only credential — never reuse in production
      POSTGRES_DB: extrudex
    ports:
      - "5433:5432"   # host 5433 avoids clashing with a locally installed Postgres
    volumes:
      - extrudex-db-data:/var/lib/postgresql/data   # persist data across restarts
    healthcheck:
      test: ["CMD-SHELL", "pg_isready -U extrudex"]
      interval: 10s
      timeout: 5s
      retries: 5
      start_period: 10s
    restart: unless-stopped
    networks:
      - extrudex-network

  extrudex-api:
    build:
      context: ./backend
      dockerfile: Dockerfile
    container_name: extrudex-api
    ports:
      - "5080:8080"   # host 5080 -> container 8080 (ASPNETCORE_URLS below)
    environment:
      - ASPNETCORE_ENVIRONMENT=Development
      - ASPNETCORE_URLS=http://+:8080
      # DB connection pieces; the host is the compose service name above
      - EXTRUDEX_DB_HOST=extrudex-db
      - EXTRUDEX_DB_PORT=5432
      - EXTRUDEX_DB_NAME=extrudex
      - EXTRUDEX_DB_USER=extrudex
      - EXTRUDEX_DB_PASSWORD=changeme
    depends_on:
      extrudex-db:
        condition: service_healthy   # do not start until pg_isready succeeds
    restart: unless-stopped
    healthcheck:
      test: ["CMD", "curl", "-f", "http://localhost:8080/health"]
      interval: 30s
      timeout: 10s
      retries: 3
      start_period: 40s   # warm-up window before probe failures count
    networks:
      - extrudex-network

  extrudex-web:
    build:
      context: ./frontend
      dockerfile: Dockerfile
    container_name: extrudex-web
    ports:
      - "5081:80"   # host 5081 -> nginx on container port 80
    depends_on:
      extrudex-api:
        condition: service_healthy   # web waits for a healthy API
    restart: unless-stopped
    networks:
      - extrudex-network

volumes:
  extrudex-db-data:

networks:
  extrudex-network:
    driver: bridge
|
||||
14
frontend/Dockerfile
Normal file
14
frontend/Dockerfile
Normal file
@@ -0,0 +1,14 @@
|
||||
# Build stage — compile the frontend with the full Node toolchain
FROM node:22-alpine AS builder
WORKDIR /app
# Copy manifests first so the `npm ci` layer is cached until dependencies change
COPY package*.json ./
RUN npm ci
COPY . .
RUN npm run build

# Serve stage — ship only the static build output behind nginx (no Node at runtime)
FROM nginx:alpine
COPY --from=builder /app/dist /usr/share/nginx/html
COPY nginx.conf /etc/nginx/conf.d/default.conf
EXPOSE 80
# Foreground nginx so the container stays alive under Docker's process model
CMD ["nginx", "-g", "daemon off;"]
|
||||
28
frontend/eslint.config.js
Normal file
28
frontend/eslint.config.js
Normal file
@@ -0,0 +1,28 @@
|
||||
// ESLint flat config for the frontend (TypeScript + React).
import js from '@eslint/js'
import globals from 'globals'
import reactHooks from 'eslint-plugin-react-hooks'
import reactRefresh from 'eslint-plugin-react-refresh'
import tseslint from 'typescript-eslint'

export default tseslint.config(
  // Never lint build output
  { ignores: ['dist'] },
  {
    // Base JS rules plus the recommended TypeScript rule set
    extends: [js.configs.recommended, ...tseslint.configs.recommended],
    files: ['**/*.{ts,tsx}'],
    languageOptions: {
      ecmaVersion: 2020,
      globals: globals.browser, // browser globals (window, document, ...)
    },
    plugins: {
      'react-hooks': reactHooks,
      'react-refresh': reactRefresh,
    },
    rules: {
      ...reactHooks.configs.recommended.rules,
      // Warn (not error) when a module exports non-components, which breaks
      // React Fast Refresh; constant exports are explicitly allowed.
      'react-refresh/only-export-components': [
        'warn',
        { allowConstantExport: true },
      ],
    },
  },
)
|
||||
13
frontend/index.html
Normal file
13
frontend/index.html
Normal file
@@ -0,0 +1,13 @@
|
||||
<!doctype html>
<html lang="en">
  <head>
    <meta charset="UTF-8" />
    <link rel="icon" type="image/svg+xml" href="/vite.svg" />
    <meta name="viewport" content="width=device-width, initial-scale=1.0" />
    <title>Extrudex</title>
  </head>
  <body>
    <!-- Application mount point; populated by the module script below -->
    <div id="root"></div>
    <script type="module" src="/src/main.tsx"></script>
  </body>
</html>
|
||||
23
frontend/nginx.conf
Normal file
23
frontend/nginx.conf
Normal file
@@ -0,0 +1,23 @@
|
||||
server {
    listen 80;
    server_name _;

    root /usr/share/nginx/html;
    index index.html;

    # SPA fallback: unknown paths serve index.html so client-side routing works
    location / {
        try_files $uri $uri/ /index.html;
    }

    # API reverse proxy.
    # FIX: the upstream host must be the compose service name ("extrudex-api"
    # in docker-compose.dev.yml, listening on 8080); the previous host
    # "backend" is not defined anywhere on the extrudex-network, so every
    # /api request failed DNS resolution inside the container.
    location /api/ {
        proxy_pass http://extrudex-api:8080/api/;
        proxy_http_version 1.1;
        # Pass through WebSocket upgrade handshakes
        proxy_set_header Upgrade $http_upgrade;
        proxy_set_header Connection 'upgrade';
        proxy_set_header Host $host;
        proxy_cache_bypass $http_upgrade;
        # Preserve client origin details for the API
        proxy_set_header X-Real-IP $remote_addr;
        proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
        proxy_set_header X-Forwarded-Proto $scheme;
    }
}
|
||||
3688
frontend/package-lock.json
generated
Normal file
3688
frontend/package-lock.json
generated
Normal file
File diff suppressed because it is too large
Load Diff
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user