chore: add missing source modules to version control
Deploy to Production / deploy (push) Failing after 7s
apix-demo, apix-portal/src, apix-spider/src, apix-registry/src, and apix-common/src were never staged. Without them the CI build has no source to compile and the Docker images cannot be produced. Also adds docs/ (infrastructure notes), which was missed in prior commits.

Co-Authored-By: Mira <noreply@anthropic.com>
This commit is contained in:
@@ -5,42 +5,98 @@ import jakarta.validation.Valid;
|
||||
import jakarta.validation.constraints.Email;
|
||||
import jakarta.validation.constraints.NotBlank;
|
||||
import jakarta.validation.constraints.NotEmpty;
|
||||
import org.eclipse.microprofile.openapi.annotations.media.Schema;
|
||||
import org.hibernate.validator.constraints.URL;
|
||||
|
||||
import java.math.BigDecimal;
|
||||
import java.time.Instant;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.UUID;
|
||||
|
||||
@Schema(description = "Bot Service Manifest (BSM) payload — the machine-readable description of a service registered in the APIX registry. An AI agent reads this to understand what the service does, how to call it, and under what terms.")
|
||||
@JsonInclude(JsonInclude.Include.NON_NULL)
|
||||
public record BsmPayload(
|
||||
|
||||
@Schema(description = "Human-readable service name.", example = "Acme Translation Service")
|
||||
@NotBlank String name,
|
||||
|
||||
@Schema(description = "What this service does, in plain language readable by an AI agent. Should describe inputs, outputs, and intended use cases.", example = "Translates text between 50 languages. Input: source text + target language code. Output: translated text with confidence score.")
|
||||
@NotBlank String description,
|
||||
|
||||
@Schema(description = "Base URL of the service endpoint. Must be publicly reachable. Agents POST requests here.", example = "https://api.acme.example/translate")
|
||||
@NotBlank @URL String endpoint,
|
||||
|
||||
@Schema(description = "Capability identifiers this service fulfils. Use lowercase kebab-case strings (e.g. nlp, translation, speech-to-text, image-classification, summarisation). Agents search the registry by these values — choose terms an agent would naturally use when looking for this type of service.", example = "[\"translation\", \"nlp\"]")
|
||||
@NotEmpty List<@NotBlank String> capabilities,
|
||||
|
||||
@Schema(description = "Contact email of the registrant. Used for verification notifications and O-level progression.", example = "ops@acme.example")
|
||||
@NotBlank @Email String registrantEmail,
|
||||
|
||||
@Schema(description = "Full legal name of the registrant (person or organisation).", example = "Acme GmbH")
|
||||
@NotBlank String registrantName,
|
||||
|
||||
@Schema(description = "ISO 3166-1 alpha-2 country code of the registrant's legal jurisdiction.", example = "DE")
|
||||
@NotBlank String registrantJurisdiction,
|
||||
|
||||
@Schema(description = "Legal form of the registrant organisation. Defaults to INDIVIDUAL if omitted.")
|
||||
OrgType registrantOrgType,
|
||||
|
||||
@Schema(description = "Legal Entity Identifier (LEI, ISO 17442). 20-character alphanumeric code issued by a GLEIF-accredited Local Operating Unit. Required to reach O-level LEGAL_ENTITY_VERIFIED (O2) or above.", example = "5493001KJTIIGC8Y1R12")
|
||||
String registrantLei,
|
||||
|
||||
@Schema(description = "URL of the OpenAPI 3.x specification for this service. Agents follow this link to discover available operations, request/response schemas, and authentication requirements.", example = "https://api.acme.example/openapi.json")
|
||||
@URL String openApiSpecUrl,
|
||||
|
||||
@Schema(description = "URL of the Model Context Protocol (MCP) manifest. Enables AI agents to invoke this service as an MCP tool without writing custom integration code.", example = "https://api.acme.example/mcp/manifest.json")
|
||||
@URL String mcpSpecUrl,
|
||||
|
||||
@Schema(description = "URL of the service's terms of use or acceptable-use policy.", example = "https://acme.example/terms")
|
||||
@URL String policyUrl,
|
||||
|
||||
@Schema(description = "URL of the security disclosure page (e.g. /.well-known/security.txt). Required for O-level HYGIENE_VERIFIED (O3).", example = "https://acme.example/.well-known/security.txt")
|
||||
@URL String securityContactUrl,
|
||||
|
||||
@Schema(description = "Pricing information. Omit for free or internally-billed services.")
|
||||
@Valid Pricing pricing,
|
||||
|
||||
@Schema(description = "BSM payload schema version. Must be '0.1' for the current registry.", example = "0.1")
|
||||
@NotBlank String bsmVersion,
|
||||
|
||||
@Schema(description = "Lifecycle stage of the service. Defaults to DEVELOPMENT if omitted. Only PRODUCTION services are returned by default capability searches (?capability=X without an explicit ?stage= parameter).")
|
||||
ServiceStage serviceStage,
|
||||
// IoT transition fields — null for non-IoT services
|
||||
|
||||
@Schema(description = "IoT migration lock. When true, agents are blocked from automatically switching to a replacement service. Use during controlled IoT device migration windows.")
|
||||
Boolean locked,
|
||||
|
||||
@Schema(description = "Scheduled decommission timestamp (UTC, ISO 8601). Must be set when transitioning to DEPRECATED stage. Agents use this to plan migration timelines.", example = "2027-01-01T00:00:00Z")
|
||||
Instant sunsetAt,
|
||||
|
||||
@Schema(description = "URL of a human- or machine-readable migration guide for consumers of this service.", example = "https://acme.example/migrate-v1-to-v2")
|
||||
@URL String migrationGuideUrl,
|
||||
List<UUID> replacesServiceIds
|
||||
|
||||
@Schema(description = "UUIDs of services this entry supersedes. Consumers of those deprecated services are directed here via the /replacements endpoint.")
|
||||
List<UUID> replacesServiceIds,
|
||||
|
||||
@Schema(description = "Domain-specific extension properties that are not covered by the standard BSM fields. " +
|
||||
"Keys are free-form strings; values may be strings, numbers, or booleans. " +
|
||||
"Extensions are stored and queryable: use ?property=key:value in capability searches to filter by any extension field. " +
|
||||
"Example uses: industry vertical ('industry':'healthcare'), geographic scope ('region':'eu'), " +
|
||||
"data-residency requirement ('dataResidency':'DE'), agent framework ('agentFramework':'langchain').")
|
||||
@JsonInclude(JsonInclude.Include.NON_EMPTY)
|
||||
Map<String, Object> extensions
|
||||
|
||||
) {
|
||||
@Schema(description = "Pricing details for metered or subscription services.")
|
||||
@JsonInclude(JsonInclude.Include.NON_NULL)
|
||||
public record Pricing(
|
||||
@Schema(description = "Billing model. Known values: PER_CALL, SUBSCRIPTION, FREE.", example = "PER_CALL")
|
||||
String billingModel,
|
||||
@Schema(description = "Price per unit in the stated currency.", example = "0.001")
|
||||
BigDecimal pricePerCall,
|
||||
@Schema(description = "ISO 4217 currency code.", example = "EUR")
|
||||
String currency,
|
||||
@Schema(description = "Billing unit description.", example = "per-1k-tokens")
|
||||
String billingUnit
|
||||
) {}
|
||||
}
|
||||
|
||||
@@ -1,10 +1,25 @@
|
||||
package org.botstandards.apix.common;
|
||||
|
||||
import org.eclipse.microprofile.openapi.annotations.media.Schema;
|
||||
|
||||
/**
 * Organisation verification level (O-level). Declaration order is ascending
 * assurance (UNVERIFIED lowest, AUDITED highest) per the @Schema descriptions;
 * do not rely on ordinal() for persistence or wire formats.
 */
@Schema(description = "Organisation verification level assigned by the APIX registry. Higher levels indicate greater identity assurance. Agents can filter search results by minimum O-level using the minOLevel parameter on the /replacements endpoint.")
public enum OLevel {

    // O0 (implied — the only level without an "On:" prefix in its description)
    @Schema(description = "No verification performed. Default for all newly registered services.")
    UNVERIFIED,

    @Schema(description = "O1: DNS ownership of the registrant domain verified. The registry confirmed the registrant controls the domain via a DNS TXT record challenge.")
    IDENTITY_VERIFIED,

    @Schema(description = "O2: Legal entity confirmed via GLEIF LEI database or OpenCorporates registry. Requires a valid registrantLei in the BSM payload.")
    LEGAL_ENTITY_VERIFIED,

    @Schema(description = "O3: Service passes technical hygiene checks — security.txt present, OpenAPI or MCP spec accessible, endpoint responding within SLA.")
    HYGIENE_VERIFIED,

    @Schema(description = "O4: Service has demonstrated operational history and passes continuous liveness monitoring.")
    OPERATIONALLY_VERIFIED,

    // Presumably O5 — the description says only "highest level"; confirm numbering.
    @Schema(description = "Highest level. Full independent audit completed by an APIX-accredited auditor.")
    AUDITED
}
|
||||
|
||||
@@ -1,9 +1,22 @@
|
||||
package org.botstandards.apix.common;
|
||||
|
||||
import org.eclipse.microprofile.openapi.annotations.media.Schema;
|
||||
|
||||
/**
 * Legal form of a registrant. Referenced by BsmPayload.registrantOrgType,
 * whose schema states the default is INDIVIDUAL when omitted.
 */
@Schema(description = "Legal form of the registrant.")
public enum OrgType {

    @Schema(description = "Natural person acting in a personal capacity.")
    INDIVIDUAL,

    @Schema(description = "Commercial for-profit company or corporation.")
    COMMERCIAL,

    @Schema(description = "Non-profit or charitable organisation.")
    NON_PROFIT,

    @Schema(description = "Government body or public authority.")
    GOVERNMENT,

    @Schema(description = "University, research institution, or academic organisation.")
    ACADEMIC
}
|
||||
|
||||
@@ -0,0 +1,31 @@
|
||||
package org.botstandards.apix.common;
|
||||
|
||||
import com.fasterxml.jackson.annotation.JsonInclude;
|
||||
import java.time.Instant;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
/** Public dashboard view of a sandbox — served by registry, consumed by portal. */
public record SandboxDashboardResponse(
        String sandboxId,
        String name,
        // Tier name as a plain string (no enum in view here — presumably FREE/DEMO etc.; confirm).
        String tier,
        int ratePerMinute,
        // ALWAYS forces these into the JSON even when null (overriding any class-level NON_NULL default).
        @JsonInclude(JsonInclude.Include.ALWAYS) Integer maxServices,
        @JsonInclude(JsonInclude.Include.ALWAYS) Integer maxOrgs,
        Instant createdAt,
        Instant expiresAt,
        /** Declared location string as provided at registration. Absent if not provided. */
        @JsonInclude(JsonInclude.Include.NON_NULL) String registrarLocation,
        /** Resolved latitude. Absent if no location was provided or geocoding failed. */
        @JsonInclude(JsonInclude.Include.NON_NULL) Double registrarLat,
        /** Resolved longitude. Absent if no location was provided or geocoding failed. */
        @JsonInclude(JsonInclude.Include.NON_NULL) Double registrarLon,
        /** Cumulative event counts since sandbox creation. */
        Map<String, Long> usage,
        Instant lastActivityAt,
        /** Up to 200 most recent agent visits with resolved coordinates only (no raw IPs). */
        List<AgentVisit> recentVisits
) {
    /** A single anonymised visit: resolved coordinates plus timestamp. */
    public record AgentVisit(double lat, double lon, Instant visitedAt) {}
}
|
||||
@@ -1,9 +1,22 @@
|
||||
package org.botstandards.apix.common;
|
||||
|
||||
import org.eclipse.microprofile.openapi.annotations.media.Schema;
|
||||
|
||||
/**
 * Service lifecycle stage. Only PRODUCTION is returned by default capability
 * searches; every other stage must be requested explicitly via ?stage=.
 */
@Schema(description = "Lifecycle stage of a registered service. Controls visibility in default capability searches.")
public enum ServiceStage {

    @Schema(description = "Under active development. Not returned by default search queries. Discover with ?stage=DEVELOPMENT.")
    DEVELOPMENT,

    @Schema(description = "Publicly available for testing but not production-ready. Discover with ?stage=BETA.")
    BETA,

    @Schema(description = "Live and ready for autonomous agent consumption. Returned by default capability searches (no ?stage= parameter required).")
    PRODUCTION,

    @Schema(description = "Scheduled for decommission. A sunsetAt date and replacement service IDs should be set. Still operational but agents should plan migration.")
    DEPRECATED,

    @Schema(description = "Retired and no longer operational. Kept in the registry for historical reference and to support replacement chain lookups.")
    DECOMMISSIONED
}
|
||||
|
||||
@@ -0,0 +1,88 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0
                             https://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>

    <!-- Version and dependency management inherited from apix-parent;
         ${revision} is the CI-friendly placeholder resolved by the parent. -->
    <parent>
        <groupId>org.botstandards</groupId>
        <artifactId>apix-parent</artifactId>
        <version>${revision}</version>
    </parent>

    <artifactId>apix-demo</artifactId>
    <name>APIX :: Demo</name>
    <description>Sandbox-scoped mock service layer. Each sandbox configures realistic endpoints
        with declared latency, rate limits, and APX pricing. Powers the APIX demo ecosystem
        and third-party training environments.</description>

    <dependencies>
        <!-- Shared DTOs (BsmPayload, enums) -->
        <dependency>
            <groupId>org.botstandards</groupId>
            <artifactId>apix-common</artifactId>
        </dependency>

        <!-- REST server -->
        <dependency>
            <groupId>io.quarkus</groupId>
            <artifactId>quarkus-rest-jackson</artifactId>
        </dependency>

        <!-- Persistence — owns demo_config + mock_service_configs tables -->
        <dependency>
            <groupId>io.quarkus</groupId>
            <artifactId>quarkus-hibernate-orm-panache</artifactId>
        </dependency>
        <dependency>
            <groupId>io.quarkus</groupId>
            <artifactId>quarkus-jdbc-postgresql</artifactId>
        </dependency>
        <dependency>
            <groupId>io.quarkus</groupId>
            <artifactId>quarkus-liquibase</artifactId>
        </dependency>

        <!-- Rate-limit bucket reset + cache invalidation -->
        <dependency>
            <groupId>io.quarkus</groupId>
            <artifactId>quarkus-scheduler</artifactId>
        </dependency>

        <!-- Calls registry API to seed sandbox and register services -->
        <dependency>
            <groupId>io.quarkus</groupId>
            <artifactId>quarkus-rest-client-jackson</artifactId>
        </dependency>

        <dependency>
            <groupId>io.quarkus</groupId>
            <artifactId>quarkus-smallrye-health</artifactId>
        </dependency>

        <dependency>
            <groupId>io.quarkus</groupId>
            <artifactId>quarkus-junit5</artifactId>
            <scope>test</scope>
        </dependency>
    </dependencies>

    <build>
        <plugins>
            <plugin>
                <groupId>${quarkus.platform.group-id}</groupId>
                <artifactId>quarkus-maven-plugin</artifactId>
                <extensions>true</extensions>
                <executions>
                    <execution>
                        <goals>
                            <goal>build</goal>
                            <goal>generate-code</goal>
                            <goal>generate-code-tests</goal>
                        </goals>
                    </execution>
                </executions>
            </plugin>
        </plugins>
    </build>
</project>
|
||||
@@ -0,0 +1,34 @@
|
||||
package org.botstandards.apix.demo.client;
|
||||
|
||||
import jakarta.ws.rs.*;
|
||||
import jakarta.ws.rs.core.MediaType;
|
||||
import org.botstandards.apix.common.BsmPayload;
|
||||
import org.botstandards.apix.demo.client.dto.RegistrationRequest;
|
||||
import org.botstandards.apix.demo.client.dto.SandboxCreated;
|
||||
import org.eclipse.microprofile.rest.client.inject.RegisterRestClient;
|
||||
|
||||
import java.util.Map;
|
||||
|
||||
/**
 * MicroProfile REST client for the APIX registry API. The base URL is
 * resolved from the "registry" config key at runtime.
 */
@RegisterRestClient(configKey = "registry")
@Produces(MediaType.APPLICATION_JSON)
@Consumes(MediaType.APPLICATION_JSON)
public interface RegistryClient {

    /** Creates a new sandbox; the response carries its id, API key, and maintenance key. */
    @POST
    @Path("/sandbox/register")
    SandboxCreated register(RegistrationRequest request);

    /**
     * Registers one BSM service inside a sandbox, authenticated with the
     * sandbox's own API key. Response shape is left untyped (Map).
     */
    @POST
    @Path("/sandbox/{uuid}/services")
    Map<String, Object> registerService(
            @PathParam("uuid") String uuid,
            @HeaderParam("X-Api-Key") String apiKey,
            BsmPayload payload);

    /**
     * Admin-only tier change (e.g. body {"tier": "DEMO"}), authenticated with
     * the registry admin key rather than a sandbox key.
     */
    @PATCH
    @Path("/sandbox/admin/{uuid}/tier")
    Map<String, Object> promoteTier(
            @PathParam("uuid") String uuid,
            @HeaderParam("X-Admin-Key") String adminKey,
            Map<String, String> body);
}
|
||||
@@ -0,0 +1,4 @@
|
||||
package org.botstandards.apix.demo.client.dto;
|
||||
|
||||
/**
 * Minimal registration payload sent to POST /sandbox/register.
 * {@code location} is the free-form declared location string
 * (surfaced later as registrarLocation on the dashboard).
 */
public record RegistrationRequest(String name, String contactEmail, String location) {}
|
||||
@@ -0,0 +1,13 @@
|
||||
package org.botstandards.apix.demo.client.dto;
|
||||
|
||||
import java.time.Instant;
|
||||
|
||||
/** Response from POST /sandbox/register — only the fields the seed service needs. */
public record SandboxCreated(
        String sandboxId,
        String name,
        // Per-sandbox key used in the X-Api-Key header for service registration.
        String apiKey,
        // Persisted by the seed service but not otherwise used in this module's visible code.
        String maintenanceKey,
        String tier,
        Instant expiresAt
) {}
|
||||
@@ -0,0 +1,17 @@
|
||||
package org.botstandards.apix.demo.entity;
|
||||
|
||||
import jakarta.persistence.*;
|
||||
import java.time.Instant;
|
||||
|
||||
/**
 * Single key/value row in demo_config — a tiny config store the seed service
 * uses to persist sandbox id, API keys, and the "demo.seeded" flag across
 * restarts. Public fields follow the Panache active-record field style used
 * elsewhere in this module.
 */
@Entity
@Table(name = "demo_config")
public class DemoConfigEntry {

    // Natural primary key, e.g. "demo.sandbox.uuid".
    @Id
    public String key;

    public String value;

    // Stamped on every save by DemoSeedService.saveConfig.
    @Column(name = "updated_at", nullable = false)
    public Instant updatedAt;
}
|
||||
@@ -0,0 +1,48 @@
|
||||
package org.botstandards.apix.demo.entity;
|
||||
|
||||
import jakarta.persistence.*;
|
||||
import java.math.BigDecimal;
|
||||
import java.time.Instant;
|
||||
import java.util.UUID;
|
||||
|
||||
/**
 * One mock endpoint definition, scoped to a sandbox. The dispatcher matches
 * incoming requests by (sandbox_id, path, method) and replies with the stored
 * static body after the declared latency.
 *
 * NOTE(review): the seed SQL relies on ON CONFLICT (sandbox_id, path, method);
 * the corresponding unique index is presumably created by Liquibase — confirm
 * it exists, as no constraint is mapped here.
 */
@Entity
@Table(name = "mock_service_configs")
public class MockServiceConfig {

    @Id
    @Column(columnDefinition = "uuid")
    public UUID id;

    @Column(name = "sandbox_id", nullable = false)
    public String sandboxId;

    /** Leading-slash path, e.g. /v1/address/validate */
    @Column(nullable = false)
    public String path;

    // HTTP verb as an upper-case string (seed data uses "GET"/"POST").
    @Column(nullable = false)
    public String method;

    // Declared base latency for the simulated response.
    @Column(name = "latency_ms", nullable = false)
    public int latencyMs;

    // Jitter as a percentage of latencyMs — presumably applied by the dispatcher; confirm.
    @Column(name = "jitter_pct", nullable = false)
    public int jitterPct;

    // Per-endpoint rate limit.
    @Column(name = "rate_per_minute", nullable = false)
    public int ratePerMinute;

    @Column(name = "status_code", nullable = false)
    public int statusCode;

    /** Static JSON response body returned verbatim on every call. */
    @Column(name = "response_body", nullable = false)
    public String responseBody;

    /** APX cost surfaced in X-APX-Cost response header. */
    @Column(name = "price_apx", nullable = false)
    public BigDecimal priceApx;

    @Column(name = "created_at", nullable = false)
    public Instant createdAt;
}
|
||||
+52
@@ -0,0 +1,52 @@
|
||||
package org.botstandards.apix.demo.resource;
|
||||
|
||||
import jakarta.inject.Inject;
|
||||
import jakarta.ws.rs.*;
|
||||
import jakarta.ws.rs.core.MediaType;
|
||||
import jakarta.ws.rs.core.Response;
|
||||
import org.botstandards.apix.demo.service.MockDispatcherService;
|
||||
|
||||
/**
 * Catches all requests to /{sandboxId}/{path} and routes them through the
 * sandbox-scoped mock dispatcher. Any sandbox owner can register custom
 * mock endpoints; the APIX demo sandbox is pre-seeded on first boot.
 *
 * NOTE(review): the {@code body} parameters below consume the request entity
 * but are never forwarded — dispatch() receives only sandboxId, path, and
 * method, so mocks cannot vary by request body. Also, DELETE (and HEAD) have
 * no handler here and will be rejected by JAX-RS; confirm that is intended.
 */
@Path("/")
@Produces(MediaType.APPLICATION_JSON)
@Consumes(MediaType.APPLICATION_JSON)
public class MockDispatcherResource {

    @Inject
    MockDispatcherService dispatcher;

    // {path: .+} makes the segment greedy so nested paths like /v1/a/b match.
    @GET
    @Path("/{sandboxId}/{path: .+}")
    public Response handleGet(@PathParam("sandboxId") String sandboxId,
                              @PathParam("path") String path) {
        return dispatcher.dispatch(sandboxId, path, "GET");
    }

    @POST
    @Path("/{sandboxId}/{path: .+}")
    public Response handlePost(@PathParam("sandboxId") String sandboxId,
                               @PathParam("path") String path,
                               String body) {
        return dispatcher.dispatch(sandboxId, path, "POST");
    }

    @PUT
    @Path("/{sandboxId}/{path: .+}")
    public Response handlePut(@PathParam("sandboxId") String sandboxId,
                              @PathParam("path") String path,
                              String body) {
        return dispatcher.dispatch(sandboxId, path, "PUT");
    }

    @PATCH
    @Path("/{sandboxId}/{path: .+}")
    public Response handlePatch(@PathParam("sandboxId") String sandboxId,
                                @PathParam("path") String path,
                                String body) {
        return dispatcher.dispatch(sandboxId, path, "PATCH");
    }
}
|
||||
@@ -0,0 +1,327 @@
|
||||
package org.botstandards.apix.demo.service;
|
||||
|
||||
import io.quarkus.logging.Log;
|
||||
import io.quarkus.runtime.StartupEvent;
|
||||
import jakarta.enterprise.context.ApplicationScoped;
|
||||
import jakarta.enterprise.event.Observes;
|
||||
import jakarta.inject.Inject;
|
||||
import jakarta.persistence.EntityManager;
|
||||
import jakarta.transaction.Transactional;
|
||||
import org.botstandards.apix.common.BsmPayload;
|
||||
import org.botstandards.apix.common.OrgType;
|
||||
import org.botstandards.apix.common.ServiceStage;
|
||||
import org.botstandards.apix.demo.client.RegistryClient;
|
||||
import org.botstandards.apix.demo.client.dto.RegistrationRequest;
|
||||
import org.botstandards.apix.demo.client.dto.SandboxCreated;
|
||||
import org.botstandards.apix.demo.entity.DemoConfigEntry;
|
||||
import org.botstandards.apix.demo.entity.MockServiceConfig;
|
||||
import org.eclipse.microprofile.config.inject.ConfigProperty;
|
||||
import org.eclipse.microprofile.rest.client.inject.RestClient;
|
||||
|
||||
import java.math.BigDecimal;
|
||||
import java.time.Instant;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.UUID;
|
||||
|
||||
/**
|
||||
* Seeds the APIX demo ecosystem on first boot.
|
||||
*
|
||||
* Flow:
|
||||
* 1. Create demo sandbox via registry API (FREE tier).
|
||||
* 2. Promote to DEMO tier via registry admin endpoint (never expires).
|
||||
* 3. Register all 17 services in the sandbox via registry API.
|
||||
* 4. Insert corresponding mock_service_configs (ON CONFLICT DO NOTHING).
|
||||
* 5. Mark seeded in demo_config.
|
||||
*
|
||||
* Subsequent boots are no-ops. Step 3 tolerates partial failures — each
|
||||
* service is attempted independently, and mock configs use ON CONFLICT DO NOTHING.
|
||||
*/
|
||||
@ApplicationScoped
|
||||
public class DemoSeedService {
|
||||
|
||||
@Inject
|
||||
EntityManager em;
|
||||
|
||||
@RestClient
|
||||
@Inject
|
||||
RegistryClient registryClient;
|
||||
|
||||
@ConfigProperty(name = "apix.demo.base-url")
|
||||
String demoBaseUrl;
|
||||
|
||||
@ConfigProperty(name = "apix.registry.admin-key")
|
||||
String adminKey;
|
||||
|
||||
/**
 * Runs the seed once at application startup. Failures are logged and
 * swallowed so the application still boots; seeding is effectively retried
 * on the next startup because seed() only writes the "demo.seeded" flag
 * after completing its loop.
 */
void onStart(@Observes StartupEvent ev) {
    try {
        seed();
    } catch (Exception e) {
        Log.errorf(e, "Demo seed failed — will retry on next startup");
    }
}
|
||||
|
||||
/**
 * Idempotent demo-ecosystem seed. Creates (or re-loads) the demo sandbox,
 * then registers each endpoint spec with the registry and inserts the
 * matching mock config row. See the class Javadoc for the full flow.
 *
 * NOTE(review): if the sandbox uuid exists in demo_config but the
 * "demo.sandbox.api-key" row is missing, apiKey ends up null and every
 * registerService call will fail (and be skipped) — confirm acceptable.
 * Also note "demo.seeded" is written even when registrations were skipped;
 * per the class doc, partial failures are tolerated by design.
 */
@Transactional
public void seed() {
    String uuid = loadConfig("demo.sandbox.uuid");
    String apiKey;

    if (uuid == null) {
        // First boot: create the sandbox, persist its credentials, promote tier.
        Log.info("Creating APIX demo sandbox...");
        SandboxCreated result = registryClient.register(new RegistrationRequest(
                "apix-demo-ecosystem",
                "demo@api-index.org",
                "Global Demo Ecosystem"));
        uuid = result.sandboxId();
        apiKey = result.apiKey();
        saveConfig("demo.sandbox.uuid", uuid);
        saveConfig("demo.sandbox.api-key", apiKey);
        saveConfig("demo.sandbox.maintenance-key", result.maintenanceKey());

        // DEMO tier via the registry admin endpoint (per class doc: never expires).
        registryClient.promoteTier(uuid, adminKey, Map.of("tier", "DEMO"));
        Log.infof("Demo sandbox created: %s", uuid);
    } else {
        apiKey = loadConfig("demo.sandbox.api-key");
    }

    // Fast exit on subsequent boots once a full seed pass has completed.
    if ("true".equals(loadConfig("demo.seeded"))) {
        Log.infof("Demo ecosystem already seeded at sandbox %s", uuid);
        return;
    }

    String base = demoBaseUrl + "/" + uuid;
    int servicesRegistered = 0;
    int configsInserted = 0;

    for (EndpointSpec spec : ENDPOINTS) {
        // Registry registration is best-effort per service; a failure here
        // does not stop the loop or prevent the mock config insert below.
        try {
            registryClient.registerService(uuid, apiKey, buildBsm(spec, base));
            servicesRegistered++;
        } catch (Exception e) {
            Log.warnf("Service registration skipped (%s %s): %s", spec.method(), spec.path(), e.getMessage());
        }

        // Native insert with ON CONFLICT DO NOTHING so re-runs never duplicate
        // rows; gen_random_uuid()/now() are computed server-side by Postgres.
        int rows = em.createNativeQuery("""
            INSERT INTO mock_service_configs
              (id, sandbox_id, path, method, latency_ms, jitter_pct,
               rate_per_minute, status_code, response_body, price_apx, created_at)
            VALUES (gen_random_uuid(), :sid, :path, :method, :latency, :jitter,
                    :rate, 200, :response, :price, now())
            ON CONFLICT (sandbox_id, path, method) DO NOTHING
            """)
            .setParameter("sid", uuid)
            .setParameter("path", spec.path())
            .setParameter("method", spec.method())
            .setParameter("latency", spec.latencyMs())
            .setParameter("jitter", spec.jitterPct())
            .setParameter("rate", spec.ratePm())
            .setParameter("response", spec.responseBody())
            .setParameter("price", spec.priceApx())
            .executeUpdate();
        configsInserted += rows;
    }

    saveConfig("demo.seeded", "true");
    Log.infof("Demo ecosystem seeded: %d services registered, %d mock configs inserted at %s/%s",
            servicesRegistered, configsInserted, demoBaseUrl, uuid);
}
|
||||
|
||||
// ── Config store ──────────────────────────────────────────────────────────
|
||||
|
||||
private String loadConfig(String key) {
|
||||
DemoConfigEntry e = em.find(DemoConfigEntry.class, key);
|
||||
return e == null ? null : e.value;
|
||||
}
|
||||
|
||||
private void saveConfig(String key, String value) {
|
||||
DemoConfigEntry e = em.find(DemoConfigEntry.class, key);
|
||||
if (e == null) {
|
||||
e = new DemoConfigEntry();
|
||||
e.key = key;
|
||||
}
|
||||
e.value = value;
|
||||
e.updatedAt = Instant.now();
|
||||
em.merge(e);
|
||||
}
|
||||
|
||||
// ── BSM builder ───────────────────────────────────────────────────────────
|
||||
|
||||
/**
 * Maps an EndpointSpec to a registry BSM payload. The mock endpoint URL is
 * base + spec.path(); pricing is PER_CALL in APX; all demo services are
 * registered as COMMERCIAL and PRODUCTION. Optional BSM fields the demo does
 * not use are passed positionally as null — see the inline comments mapping
 * each null group to its record component.
 */
private static BsmPayload buildBsm(EndpointSpec spec, String base) {
    return new BsmPayload(
            spec.serviceName(),
            spec.description(),
            base + spec.path(),
            spec.capabilities(),
            spec.regEmail(),
            spec.regName(),
            spec.regJurisdiction(),
            OrgType.COMMERCIAL,
            // registrantLei, openApiSpecUrl, mcpSpecUrl, policyUrl, securityContactUrl
            null, null, null, null, null,
            new BsmPayload.Pricing("PER_CALL", spec.priceApx(), "APX", "per-call"),
            "0.1",
            ServiceStage.PRODUCTION,
            // locked, sunsetAt, migrationGuideUrl, replacesServiceIds
            null, null, null, null,
            // Queryable extension properties (?property=key:value searches).
            Map.of(
                    "declaredLatencyMs", spec.latencyMs(),
                    "ratePerMinute", spec.ratePm(),
                    "workflow", spec.workflow(),
                    "mockEndpoint", true));
}
|
||||
|
||||
// ── Endpoint specs ────────────────────────────────────────────────────────
|
||||
|
||||
/**
 * One seeded demo endpoint: the mock's runtime behaviour (latency, jitter,
 * rate limit, static response) plus the registry-facing BSM metadata.
 */
private record EndpointSpec(
        // HTTP verb + leading-slash path, the mock's dispatch key.
        String method, String path,
        int latencyMs, int jitterPct, int ratePm,
        // APX price per call; also surfaced in the BSM Pricing block.
        BigDecimal priceApx,
        // Static JSON body returned verbatim.
        String responseBody,
        String serviceName, String description,
        List<String> capabilities,
        String regEmail, String regName, String regJurisdiction,
        // Demo workflow tag (e.g. "W1", "W2"), stored as a BSM extension.
        String workflow) {}
|
||||
|
||||
private static final List<EndpointSpec> ENDPOINTS = List.of(
|
||||
|
||||
// ── W1: Cross-border Fulfilment ───────────────────────────────────────
|
||||
|
||||
new EndpointSpec("POST", "/v1/address/validate",
|
||||
80, 10, 600, new BigDecimal("0.0010"),
|
||||
"""
|
||||
{"valid":true,"standardized":{"street":"Maximilianstra\\u00dfe 1","city":"M\\u00fcnchen","postalCode":"80539","countryCode":"DE","coordinates":{"lat":48.1391,"lon":11.5802}},"confidence":0.97,"normalizedFormat":"DIN_5008"}""",
|
||||
"Address Validation", "Validates and standardises postal addresses to ISO/DIN format. Returns geocoordinates and confidence score.",
|
||||
List.of("address-validation", "geocoding"),
|
||||
"demo-databridge@api-index.org", "DataBridge Inc", "US", "W1"),
|
||||
|
||||
new EndpointSpec("GET", "/v1/customs/tariff",
|
||||
300, 10, 120, new BigDecimal("0.0080"),
|
||||
"""
|
||||
{"hsCode":"8471.30","description":"Portable automatic data-processing machines","dutyRate":0.0,"vatRate":0.19,"specialMeasures":[],"regulation":"EU Tariff 2024/1234","currency":"EUR"}""",
|
||||
"Customs Tariff Lookup", "Returns HS tariff codes, duty rates, and VAT rates for cross-border goods classification.",
|
||||
List.of("customs-lookup", "trade-compliance", "hs-classification"),
|
||||
"demo-eutariff@api-index.org", "EuTariff BV", "NL", "W1"),
|
||||
|
||||
new EndpointSpec("POST", "/v1/carrier/nord/quote",
|
||||
150, 10, 300, new BigDecimal("0.0120"),
|
||||
"""
|
||||
{"carrier":"NordLogistik GmbH","service":"Economy EU","estimatedDays":5,"price":{"amount":12.40,"currency":"APX"},"trackingAvailable":true,"cutoffTime":"16:00 CET","quoteId":"NL-Q-DEMO-0017"}""",
|
||||
"NordLogistik Shipping Quote", "Economy EU shipping quotes from NordLogistik. Specialised in intra-European road and rail freight.",
|
||||
List.of("shipping-quote", "logistics", "eu-delivery"),
|
||||
"demo-nordlogistik@api-index.org", "NordLogistik GmbH", "DE", "W1"),
|
||||
|
||||
new EndpointSpec("POST", "/v1/carrier/swift/quote",
|
||||
120, 10, 300, new BigDecimal("0.0350"),
|
||||
"""
|
||||
{"carrier":"SwiftCargo Ltd","service":"Express Global","estimatedDays":2,"price":{"amount":35.00,"currency":"APX"},"trackingAvailable":true,"cutoffTime":"14:00 GMT","quoteId":"SC-Q-DEMO-0043"}""",
|
||||
"SwiftCargo Express Quote", "Global express shipping quotes. 2-day delivery to major hubs worldwide.",
|
||||
List.of("shipping-quote", "express-delivery", "global-logistics"),
|
||||
"demo-swiftcargo@api-index.org", "SwiftCargo Ltd", "GB", "W1"),
|
||||
|
||||
new EndpointSpec("POST", "/v1/carrier/pacrim/quote",
|
||||
200, 10, 200, new BigDecimal("0.0180"),
|
||||
"""
|
||||
{"carrier":"PacRim Express Pte","service":"Asia-Pacific Economy","estimatedDays":7,"price":{"amount":18.20,"currency":"APX"},"trackingAvailable":true,"cutoffTime":"18:00 SGT","quoteId":"PR-Q-DEMO-0009"}""",
|
||||
"PacRim Express Quote", "Economy Asia-Pacific shipping quotes. Cost-optimised routes covering SEA, JP, KR, AU.",
|
||||
List.of("shipping-quote", "logistics", "asia-pacific-delivery"),
|
||||
"demo-pacrim@api-index.org", "PacRim Express Pte", "SG", "W1"),
|
||||
|
||||
new EndpointSpec("POST", "/v1/shipment/label",
|
||||
250, 10, 200, new BigDecimal("0.0150"),
|
||||
"""
|
||||
{"labelId":"SHP-DEMO-7734","trackingNumber":"NL1234567890DE","carrier":"NordLogistik GmbH","labelFormat":"PDF","estimatedPickup":"2026-05-15T16:00:00Z"}""",
|
||||
"Shipment Label Generation", "Generates carrier-compliant shipping labels and assigns a tracking number.",
|
||||
List.of("shipment-label", "logistics", "label-generation"),
|
||||
"demo-nordlogistik@api-index.org", "NordLogistik GmbH", "DE", "W1"),
|
||||
|
||||
new EndpointSpec("GET", "/v1/shipment/track",
|
||||
100, 10, 500, new BigDecimal("0.0030"),
|
||||
"""
|
||||
{"trackingNumber":"NL1234567890DE","status":"IN_TRANSIT","currentLocation":"Frankfurt Hub, DE","estimatedDelivery":"2026-05-19T18:00:00Z","events":[{"timestamp":"2026-05-14T14:23:00Z","location":"Munich Depot","description":"Parcel collected"},{"timestamp":"2026-05-14T22:15:00Z","location":"Frankfurt Hub","description":"In transit to destination hub"}]}""",
|
||||
"Shipment Tracking", "Real-time shipment status and event history by tracking number.",
|
||||
List.of("shipment-tracking", "logistics"),
|
||||
"demo-nordlogistik@api-index.org", "NordLogistik GmbH", "DE", "W1"),
|
||||
|
||||
// ── W2: Micro-lending Decision ────────────────────────────────────────
|
||||
|
||||
new EndpointSpec("POST", "/v1/identity/enrich",
|
||||
350, 10, 200, new BigDecimal("0.0200"),
|
||||
"""
|
||||
{"enrichedName":"Johann K. M\\u00fcller","dateOfBirth":"1984-03-12","currentCity":"Berlin","country":"DE","verificationScore":0.91,"enrichmentId":"IE-DEMO-0552"}""",
|
||||
"Identity Enrichment", "Enriches a person's identity with verified structured data. Input: name + date of birth. Output: normalised profile with verification score.",
|
||||
List.of("identity-enrichment", "kyc", "data-enrichment"),
|
||||
"demo-databridge@api-index.org", "DataBridge Inc", "US", "W2"),
|
||||
|
||||
new EndpointSpec("POST", "/v1/credit/signal",
|
||||
450, 10, 60, new BigDecimal("0.0250"),
|
||||
"""
|
||||
{"enrichmentId":"IE-DEMO-0552","creditScore":720,"riskBand":"LOW","factors":[{"key":"payment_history","score":0.95,"weight":0.35},{"key":"credit_utilization","score":0.72,"weight":0.30},{"key":"account_age","score":0.88,"weight":0.15}],"reportedAt":"2026-05-14T09:00:00Z"}""",
|
||||
"Credit Signal Aggregation", "Aggregates bureau signals into a single credit score and risk band. Requires prior identity enrichment ID.",
|
||||
List.of("credit-scoring", "risk-assessment", "financial-analytics"),
|
||||
"demo-trustscore@api-index.org", "TrustScore AG", "CH", "W2"),
|
||||
|
||||
new EndpointSpec("POST", "/v1/fraud/score",
|
||||
380, 10, 90, new BigDecimal("0.0400"),
|
||||
"""
|
||||
{"requestId":"FS-DEMO-1103","fraudScore":12,"riskLevel":"MINIMAL","flags":[],"velocityCheck":"PASS","deviceFingerprint":"CONSISTENT","recommendedAction":"PROCEED"}""",
|
||||
"Fraud Scoring", "Scores a transaction or application for fraud risk. Returns 0–100 score, risk level, and triggered flags.",
|
||||
List.of("fraud-detection", "risk-scoring", "transaction-security"),
|
||||
"demo-trustscore@api-index.org", "TrustScore AG", "CH", "W2"),
|
||||
|
||||
new EndpointSpec("POST", "/v1/lending/offer",
|
||||
200, 10, 120, new BigDecimal("0.0300"),
|
||||
"""
|
||||
{"offerId":"LO-DEMO-2287","approvedAmount":15000.00,"currency":"APX","interestRateAnnual":0.049,"termMonths":36,"monthlyPayment":448.32,"totalRepayable":16139.52,"validUntil":"2026-05-21T23:59:59Z"}""",
|
||||
"Lending Offer Engine", "Generates a personalised loan offer based on credit score and requested amount.",
|
||||
List.of("loan-origination", "lending", "credit-decision"),
|
||||
"demo-lendfast@api-index.org", "LendFast GmbH", "DE", "W2"),
|
||||
|
||||
new EndpointSpec("POST", "/v1/contract/acknowledge",
|
||||
150, 10, 300, new BigDecimal("0.0050"),
|
||||
"""
|
||||
{"contractId":"CTR-DEMO-0091","offerId":"LO-DEMO-2287","status":"PENDING_SIGNATURE","expiresAt":"2026-05-21T23:59:59Z","signingProvider":"DemoSign"}""",
|
||||
"Contract Acknowledgement", "Initiates a digital contract signing workflow for an accepted lending offer.",
|
||||
List.of("contract-management", "e-signature", "document-workflow"),
|
||||
"demo-lendfast@api-index.org", "LendFast GmbH", "DE", "W2"),
|
||||
|
||||
// ── W3: Healthcare Referral ───────────────────────────────────────────
|
||||
|
||||
new EndpointSpec("POST", "/v1/symptom/triage",
|
||||
500, 10, 120, new BigDecimal("0.0150"),
|
||||
"""
|
||||
{"triageId":"TR-DEMO-0834","urgency":"ROUTINE","urgencyScore":3,"specialtyRequired":"CARDIOLOGY","recommendedTimeframe":"within_2_weeks","disclaimer":"Non-clinical triage assessment. Consult a physician for medical advice."}""",
|
||||
"Symptom Triage", "Non-clinical AI triage: maps reported symptoms to urgency level and required medical specialty.",
|
||||
List.of("medical-triage", "symptom-assessment", "healthcare"),
|
||||
"demo-mednet@api-index.org", "MedNet Systems", "NL", "W3"),
|
||||
|
||||
new EndpointSpec("GET", "/v1/specialist/availability",
|
||||
250, 10, 180, new BigDecimal("0.0100"),
|
||||
"""
|
||||
{"specialty":"CARDIOLOGY","slots":[{"slotId":"SLT-20260520-0900","practitioner":"Dr. Helena Brandt","datetime":"2026-05-20T09:00:00Z","location":"Charit\\u00e9 Outpatient, Berlin","type":"IN_PERSON"},{"slotId":"SLT-20260520-1400","practitioner":"Dr. Markus Frei","datetime":"2026-05-20T14:00:00Z","type":"VIDEO"}],"nextAvailable":"2026-05-20T09:00:00Z"}""",
|
||||
"Specialist Availability", "Returns available appointment slots for a given medical specialty and location.",
|
||||
List.of("appointment-booking", "specialist-scheduling", "healthcare"),
|
||||
"demo-mednet@api-index.org", "MedNet Systems", "NL", "W3"),
|
||||
|
||||
new EndpointSpec("POST", "/v1/insurance/eligibility",
|
||||
320, 10, 150, new BigDecimal("0.0220"),
|
||||
"""
|
||||
{"covered":true,"coveragePercent":80,"copay":25.00,"currency":"APX","preAuthRequired":true,"preAuthCode":null,"planName":"DemoPlan Premium","validThrough":"2026-12-31"}""",
|
||||
"Insurance Eligibility Check", "Verifies patient insurance coverage for a given service code. Returns copay, coverage %, and pre-auth requirements.",
|
||||
List.of("insurance-verification", "coverage-check", "healthcare-billing"),
|
||||
"demo-insubridge@api-index.org", "InsuBridge AG", "CH", "W3"),
|
||||
|
||||
new EndpointSpec("POST", "/v1/appointment/reserve",
|
||||
200, 10, 200, new BigDecimal("0.0180"),
|
||||
"""
|
||||
{"appointmentId":"APT-DEMO-0900","slotId":"SLT-20260520-0900","confirmationCode":"CONF-4471-X","practitioner":"Dr. Helena Brandt","datetime":"2026-05-20T09:00:00Z","location":"Charit\\u00e9 Outpatient, Charit\\u00e9platz 1, 10117 Berlin"}""",
|
||||
"Appointment Reservation", "Reserves a specialist appointment slot and returns a confirmation code.",
|
||||
List.of("appointment-booking", "scheduling", "healthcare"),
|
||||
"demo-insubridge@api-index.org", "InsuBridge AG", "CH", "W3"),
|
||||
|
||||
new EndpointSpec("POST", "/v1/prescription/preauth",
|
||||
600, 10, 30, new BigDecimal("0.0750"),
|
||||
"""
|
||||
{"authId":"PA-2026-48821","authorized":true,"medication":"Bisoprolol 5mg","diagnosis":"Suspected stable angina (I25.1)","authorizedQuantity":30,"refillsAllowed":2,"validUntil":"2026-06-14T23:59:59Z","dispensingInstructions":"Once daily with food. Blood pressure monitoring required."}""",
|
||||
"Prescription Pre-authorisation", "Issues pre-authorisation for prescribed medications. Highest latency — regulatory validation required. Rate-limited to 30/min.",
|
||||
List.of("prescription-authorization", "pharmacy", "healthcare"),
|
||||
"demo-rxchain@api-index.org", "RxChain Corp", "JP", "W3")
|
||||
);
|
||||
}
|
||||
@@ -0,0 +1,89 @@
|
||||
package org.botstandards.apix.demo.service;
|
||||
|
||||
import io.quarkus.scheduler.Scheduled;
|
||||
import jakarta.enterprise.context.ApplicationScoped;
|
||||
import jakarta.inject.Inject;
|
||||
import jakarta.persistence.EntityManager;
|
||||
import jakarta.ws.rs.core.Response;
|
||||
|
||||
import java.util.Map;
|
||||
import java.util.Optional;
|
||||
import java.util.Random;
|
||||
import java.util.concurrent.ConcurrentHashMap;
|
||||
|
||||
import org.botstandards.apix.demo.entity.MockServiceConfig;
|
||||
|
||||
@ApplicationScoped
|
||||
public class MockDispatcherService {
|
||||
|
||||
@Inject
|
||||
EntityManager em;
|
||||
|
||||
@Inject
|
||||
RateLimiterService rateLimiter;
|
||||
|
||||
private final Random rng = new Random();
|
||||
|
||||
/** (sandboxId:path:method) → Optional<config>. Empty means "confirmed not found". */
|
||||
private final ConcurrentHashMap<String, Optional<MockServiceConfig>> configCache =
|
||||
new ConcurrentHashMap<>();
|
||||
|
||||
@Scheduled(every = "5M")
|
||||
void invalidateCache() {
|
||||
configCache.clear();
|
||||
}
|
||||
|
||||
public Response dispatch(String sandboxId, String path, String method) {
|
||||
String fullPath = "/" + path;
|
||||
MockServiceConfig cfg = resolveConfig(sandboxId, fullPath, method.toUpperCase());
|
||||
|
||||
if (cfg == null) {
|
||||
return Response.status(404)
|
||||
.entity(Map.of(
|
||||
"message", method.toUpperCase() + " " + fullPath + " not configured in sandbox " + sandboxId,
|
||||
"hint", "Register a mock config via the APIX demo API"))
|
||||
.build();
|
||||
}
|
||||
|
||||
if (!rateLimiter.allow(sandboxId, fullPath, method.toUpperCase(), cfg.ratePerMinute)) {
|
||||
return Response.status(429)
|
||||
.header("Retry-After", "60")
|
||||
.entity(Map.of(
|
||||
"message", "Rate limit exceeded",
|
||||
"limitPerMinute", cfg.ratePerMinute,
|
||||
"retryAfterSeconds", 60))
|
||||
.build();
|
||||
}
|
||||
|
||||
int jitter = (int) (cfg.latencyMs * (cfg.jitterPct / 100.0) * (rng.nextDouble() * 2 - 1));
|
||||
int delay = Math.max(0, cfg.latencyMs + jitter);
|
||||
try {
|
||||
Thread.sleep(delay);
|
||||
} catch (InterruptedException e) {
|
||||
Thread.currentThread().interrupt();
|
||||
}
|
||||
|
||||
return Response.status(cfg.statusCode)
|
||||
.entity(cfg.responseBody)
|
||||
.header("Content-Type", "application/json")
|
||||
.header("X-APX-Cost", cfg.priceApx.toPlainString())
|
||||
.header("X-APX-Latency-Ms", String.valueOf(delay))
|
||||
.header("X-APX-Sandbox", sandboxId)
|
||||
.build();
|
||||
}
|
||||
|
||||
private MockServiceConfig resolveConfig(String sandboxId, String path, String method) {
|
||||
String key = sandboxId + ":" + path + ":" + method;
|
||||
return configCache.computeIfAbsent(key, k ->
|
||||
em.createQuery(
|
||||
"SELECT c FROM MockServiceConfig c " +
|
||||
"WHERE c.sandboxId = :sid AND c.path = :path AND c.method = :method",
|
||||
MockServiceConfig.class)
|
||||
.setParameter("sid", sandboxId)
|
||||
.setParameter("path", path)
|
||||
.setParameter("method", method)
|
||||
.getResultList()
|
||||
.stream().findFirst()
|
||||
).orElse(null);
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,30 @@
|
||||
package org.botstandards.apix.demo.service;
|
||||
|
||||
import io.quarkus.scheduler.Scheduled;
|
||||
import jakarta.enterprise.context.ApplicationScoped;
|
||||
|
||||
import java.util.concurrent.ConcurrentHashMap;
|
||||
import java.util.concurrent.atomic.AtomicInteger;
|
||||
|
||||
/**
|
||||
* Fixed-window per-minute rate limiter keyed by (sandboxId, path, method).
|
||||
* Resets all buckets at the start of each minute. Good enough for demo purposes —
|
||||
* a token-bucket implementation would reduce burst risk in production.
|
||||
*/
|
||||
@ApplicationScoped
|
||||
public class RateLimiterService {
|
||||
|
||||
private final ConcurrentHashMap<String, AtomicInteger> counters = new ConcurrentHashMap<>();
|
||||
|
||||
/** Returns true if the request is within the declared rate limit. */
|
||||
public boolean allow(String sandboxId, String path, String method, int ratePerMinute) {
|
||||
String key = sandboxId + ":" + path + ":" + method;
|
||||
return counters.computeIfAbsent(key, k -> new AtomicInteger(0))
|
||||
.incrementAndGet() <= ratePerMinute;
|
||||
}
|
||||
|
||||
@Scheduled(cron = "0 * * * * ?")
|
||||
void resetBuckets() {
|
||||
counters.clear();
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,23 @@
|
||||
quarkus.http.port=8083
|
||||
quarkus.smallrye-health.root-path=/q/health
|
||||
quarkus.log.level=${LOG_LEVEL:INFO}
|
||||
|
||||
# DB — shares the same PostgreSQL instance as the registry
|
||||
quarkus.datasource.db-kind=postgresql
|
||||
quarkus.datasource.jdbc.url=${DB_URL:jdbc:postgresql://localhost:5432/apix}
|
||||
quarkus.datasource.username=${DB_USER:apix}
|
||||
quarkus.datasource.password=${DB_PASSWORD:apix}
|
||||
quarkus.hibernate-orm.database.generation=none
|
||||
|
||||
# Liquibase — demo owns demo_config + mock_service_configs
|
||||
quarkus.liquibase.change-log=db/changelog/db.changelog-master.xml
|
||||
quarkus.liquibase.migrate-at-start=true
|
||||
|
||||
# Registry REST client — seed service calls the registry to create sandbox and register services
|
||||
quarkus.rest-client.registry.url=${APIX_REGISTRY_URL:http://registry:8180}
|
||||
quarkus.rest-client.registry.connect-timeout=5000
|
||||
quarkus.rest-client.registry.read-timeout=10000
|
||||
|
||||
# Demo config
|
||||
apix.demo.base-url=${APIX_DEMO_BASE_URL:https://demo.api-index.org}
|
||||
apix.registry.admin-key=${APIX_API_KEY:dev-admin-key}
|
||||
@@ -0,0 +1,74 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<databaseChangeLog
|
||||
xmlns="http://www.liquibase.org/xml/ns/dbchangelog"
|
||||
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
|
||||
xsi:schemaLocation="http://www.liquibase.org/xml/ns/dbchangelog
|
||||
http://www.liquibase.org/xml/ns/dbchangelog/dbchangelog-4.27.xsd">
|
||||
|
||||
<changeSet id="demo-001" author="apix-demo">
|
||||
|
||||
<!-- Key-value store for demo bootstrap state (sandbox UUID, seed flag, etc.) -->
|
||||
<createTable tableName="demo_config">
|
||||
<column name="key" type="varchar(100)">
|
||||
<constraints primaryKey="true" nullable="false"/>
|
||||
</column>
|
||||
<column name="value" type="text"/>
|
||||
<column name="updated_at" type="timestamptz" defaultValueComputed="now()">
|
||||
<constraints nullable="false"/>
|
||||
</column>
|
||||
</createTable>
|
||||
|
||||
<!-- Sandbox-scoped mock endpoint configurations.
|
||||
Any sandbox owner can register mock endpoints here; the demo
|
||||
ecosystem seed pre-populates the APIX demo sandbox on first boot. -->
|
||||
<createTable tableName="mock_service_configs">
|
||||
<column name="id" type="uuid" defaultValueComputed="gen_random_uuid()">
|
||||
<constraints primaryKey="true" nullable="false"/>
|
||||
</column>
|
||||
<column name="sandbox_id" type="varchar(100)">
|
||||
<constraints nullable="false"/>
|
||||
</column>
|
||||
<!-- Leading slash, e.g. /v1/address/validate -->
|
||||
<column name="path" type="varchar(255)">
|
||||
<constraints nullable="false"/>
|
||||
</column>
|
||||
<column name="method" type="varchar(10)" defaultValue="POST">
|
||||
<constraints nullable="false"/>
|
||||
</column>
|
||||
<column name="latency_ms" type="int" defaultValueNumeric="200">
|
||||
<constraints nullable="false"/>
|
||||
</column>
|
||||
<!-- Applied as ±jitter_pct% random deviation around latency_ms -->
|
||||
<column name="jitter_pct" type="int" defaultValueNumeric="10">
|
||||
<constraints nullable="false"/>
|
||||
</column>
|
||||
<column name="rate_per_minute" type="int" defaultValueNumeric="100">
|
||||
<constraints nullable="false"/>
|
||||
</column>
|
||||
<column name="status_code" type="int" defaultValueNumeric="200">
|
||||
<constraints nullable="false"/>
|
||||
</column>
|
||||
<!-- Static JSON body returned verbatim on every call -->
|
||||
<column name="response_body" type="text">
|
||||
<constraints nullable="false"/>
|
||||
</column>
|
||||
<!-- Cost surfaced in X-APX-Cost response header -->
|
||||
<column name="price_apx" type="numeric(10,4)" defaultValueNumeric="0.0100">
|
||||
<constraints nullable="false"/>
|
||||
</column>
|
||||
<column name="created_at" type="timestamptz" defaultValueComputed="now()">
|
||||
<constraints nullable="false"/>
|
||||
</column>
|
||||
</createTable>
|
||||
|
||||
<addUniqueConstraint tableName="mock_service_configs"
|
||||
columnNames="sandbox_id, path, method"
|
||||
constraintName="uq_mock_config_sandbox_path_method"/>
|
||||
|
||||
<createIndex tableName="mock_service_configs" indexName="idx_mock_configs_sandbox">
|
||||
<column name="sandbox_id"/>
|
||||
</createIndex>
|
||||
|
||||
</changeSet>
|
||||
|
||||
</databaseChangeLog>
|
||||
@@ -0,0 +1,10 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<databaseChangeLog
|
||||
xmlns="http://www.liquibase.org/xml/ns/dbchangelog"
|
||||
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
|
||||
xsi:schemaLocation="http://www.liquibase.org/xml/ns/dbchangelog
|
||||
http://www.liquibase.org/xml/ns/dbchangelog/dbchangelog-4.27.xsd">
|
||||
|
||||
<include file="changes/001-demo-schema.xml" relativeToChangelogFile="true"/>
|
||||
|
||||
</databaseChangeLog>
|
||||
@@ -0,0 +1,19 @@
|
||||
package org.botstandards.apix.portal.client;
|
||||
|
||||
import jakarta.ws.rs.GET;
|
||||
import jakarta.ws.rs.Path;
|
||||
import jakarta.ws.rs.PathParam;
|
||||
import jakarta.ws.rs.Produces;
|
||||
import jakarta.ws.rs.core.MediaType;
|
||||
import org.botstandards.apix.common.SandboxDashboardResponse;
|
||||
import org.eclipse.microprofile.rest.client.inject.RegisterRestClient;
|
||||
|
||||
/**
 * MicroProfile REST client for the APIX registry's sandbox endpoints.
 * Base URL comes from the {@code quarkus.rest-client.registry.url} config key.
 */
@RegisterRestClient(configKey = "registry")
@Path("/sandbox")
@Produces(MediaType.APPLICATION_JSON)
public interface RegistryClient {

    /**
     * Fetches the dashboard view of a sandbox from the registry service.
     *
     * @param uuid sandbox identifier as it appears in the portal URL
     * @return the registry's dashboard payload for that sandbox
     */
    @GET
    @Path("/{uuid}")
    SandboxDashboardResponse getDashboard(@PathParam("uuid") String uuid);
}
|
||||
+62
@@ -0,0 +1,62 @@
|
||||
package org.botstandards.apix.portal.resource;
|
||||
|
||||
import com.fasterxml.jackson.databind.ObjectMapper;
|
||||
import io.quarkus.qute.CheckedTemplate;
|
||||
import io.quarkus.qute.TemplateInstance;
|
||||
import jakarta.inject.Inject;
|
||||
import jakarta.ws.rs.*;
|
||||
import jakarta.ws.rs.core.MediaType;
|
||||
import jakarta.ws.rs.core.Response;
|
||||
import org.botstandards.apix.common.SandboxDashboardResponse;
|
||||
import org.eclipse.microprofile.rest.client.inject.RestClient;
|
||||
import org.botstandards.apix.portal.client.RegistryClient;
|
||||
import org.jboss.logging.Logger;
|
||||
|
||||
@Path("/sandbox")
|
||||
public class DashboardResource {
|
||||
|
||||
private static final Logger LOG = Logger.getLogger(DashboardResource.class);
|
||||
|
||||
@Inject
|
||||
@RestClient
|
||||
RegistryClient registryClient;
|
||||
|
||||
@Inject
|
||||
ObjectMapper objectMapper;
|
||||
|
||||
@CheckedTemplate
|
||||
static class Templates {
|
||||
static native TemplateInstance dashboard(SandboxDashboardResponse dashboard, String dataJson);
|
||||
static native TemplateInstance notFound(String uuid);
|
||||
static native TemplateInstance error(String uuid, String message);
|
||||
}
|
||||
|
||||
@GET
|
||||
@Path("/{uuid}")
|
||||
@Produces(MediaType.TEXT_HTML)
|
||||
public TemplateInstance dashboard(@PathParam("uuid") String uuid) {
|
||||
SandboxDashboardResponse dashboard;
|
||||
try {
|
||||
dashboard = registryClient.getDashboard(uuid);
|
||||
} catch (WebApplicationException e) {
|
||||
int status = e.getResponse().getStatus();
|
||||
if (status == 404) return Templates.notFound(uuid);
|
||||
if (status == 400) return Templates.notFound(uuid);
|
||||
LOG.errorf("Registry error fetching sandbox %s: HTTP %d", uuid, status);
|
||||
return Templates.error(uuid, "Registry unavailable");
|
||||
} catch (Exception e) {
|
||||
LOG.errorf(e, "Failed to fetch sandbox %s from registry", uuid);
|
||||
return Templates.error(uuid, "Could not reach the registry");
|
||||
}
|
||||
|
||||
try {
|
||||
// Jackson produces safe JSON; replace </ to prevent </script> injection
|
||||
String raw = objectMapper.writeValueAsString(dashboard);
|
||||
String safe = raw.replace("</", "<\\/");
|
||||
return Templates.dashboard(dashboard, safe);
|
||||
} catch (Exception e) {
|
||||
throw new WebApplicationException(Response.serverError()
|
||||
.entity("Failed to serialize dashboard data").build());
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,565 @@
|
||||
<!DOCTYPE html>
|
||||
<html lang="en">
|
||||
<head>
|
||||
<meta charset="UTF-8">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0">
|
||||
<title>API Index — Global Discovery Infrastructure for Autonomous Agents</title>
|
||||
<meta name="description" content="A machine-readable, always-current index of agent-consumable services. The global discovery infrastructure for autonomous agents.">
|
||||
<style>
|
||||
*, *::before, *::after { box-sizing: border-box; margin: 0; padding: 0; }
|
||||
|
||||
:root {
|
||||
--black: #0a0a0a;
|
||||
--gray: #4a4a4a;
|
||||
--light: #f5f5f5;
|
||||
--border: #e0e0e0;
|
||||
--accent: #1a1a1a;
|
||||
--mono: "JetBrains Mono", "Fira Code", "Courier New", monospace;
|
||||
--sans: -apple-system, BlinkMacSystemFont, "Segoe UI", Helvetica, Arial, sans-serif;
|
||||
}
|
||||
|
||||
body {
|
||||
font-family: var(--sans);
|
||||
background: #ffffff;
|
||||
color: var(--black);
|
||||
line-height: 1.6;
|
||||
min-height: 100vh;
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
}
|
||||
|
||||
header {
|
||||
border-bottom: 1px solid var(--border);
|
||||
padding: 1.5rem 2rem;
|
||||
display: flex;
|
||||
align-items: baseline;
|
||||
gap: 1rem;
|
||||
}
|
||||
|
||||
header .wordmark {
|
||||
font-family: var(--mono);
|
||||
font-size: 0.95rem;
|
||||
font-weight: 600;
|
||||
letter-spacing: 0.02em;
|
||||
color: var(--black);
|
||||
text-decoration: none;
|
||||
}
|
||||
|
||||
header .tagline {
|
||||
font-size: 0.8rem;
|
||||
color: var(--gray);
|
||||
}
|
||||
|
||||
main {
|
||||
flex: 1;
|
||||
max-width: 680px;
|
||||
margin: 0 auto;
|
||||
padding: 4rem 2rem;
|
||||
width: 100%;
|
||||
}
|
||||
|
||||
h1 {
|
||||
font-size: 1.75rem;
|
||||
font-weight: 700;
|
||||
letter-spacing: -0.02em;
|
||||
line-height: 1.25;
|
||||
margin-bottom: 1.25rem;
|
||||
}
|
||||
|
||||
.lead {
|
||||
font-size: 1.05rem;
|
||||
color: var(--gray);
|
||||
margin-bottom: 2.5rem;
|
||||
max-width: 540px;
|
||||
}
|
||||
|
||||
.problem {
|
||||
background: var(--light);
|
||||
border-left: 3px solid var(--black);
|
||||
padding: 1.25rem 1.5rem;
|
||||
margin-bottom: 2.5rem;
|
||||
font-size: 0.95rem;
|
||||
}
|
||||
|
||||
.problem p + p {
|
||||
margin-top: 0.75rem;
|
||||
}
|
||||
|
||||
h2 {
|
||||
font-size: 0.75rem;
|
||||
font-weight: 600;
|
||||
letter-spacing: 0.1em;
|
||||
text-transform: uppercase;
|
||||
color: var(--gray);
|
||||
margin-bottom: 1rem;
|
||||
}
|
||||
|
||||
.features {
|
||||
display: grid;
|
||||
gap: 1.25rem;
|
||||
margin-bottom: 2.5rem;
|
||||
}
|
||||
|
||||
.feature {
|
||||
border: 1px solid var(--border);
|
||||
padding: 1.25rem;
|
||||
}
|
||||
|
||||
.feature .label {
|
||||
font-family: var(--mono);
|
||||
font-size: 0.75rem;
|
||||
font-weight: 600;
|
||||
color: var(--gray);
|
||||
margin-bottom: 0.4rem;
|
||||
}
|
||||
|
||||
.feature p {
|
||||
font-size: 0.9rem;
|
||||
color: var(--gray);
|
||||
}
|
||||
|
||||
.draft-suite {
|
||||
border: 1px solid var(--border);
|
||||
margin-bottom: 2.5rem;
|
||||
}
|
||||
|
||||
.draft-item {
|
||||
display: flex;
|
||||
align-items: flex-start;
|
||||
gap: 1.25rem;
|
||||
padding: 1rem 1.25rem;
|
||||
border-bottom: 1px solid var(--border);
|
||||
}
|
||||
|
||||
.draft-item:last-child {
|
||||
border-bottom: none;
|
||||
}
|
||||
|
||||
.draft-item .label {
|
||||
font-family: var(--mono);
|
||||
font-size: 0.7rem;
|
||||
font-weight: 600;
|
||||
color: var(--gray);
|
||||
text-transform: uppercase;
|
||||
letter-spacing: 0.08em;
|
||||
white-space: nowrap;
|
||||
padding-top: 0.15rem;
|
||||
min-width: 120px;
|
||||
flex-shrink: 0;
|
||||
}
|
||||
|
||||
.draft-item .content {
|
||||
font-size: 0.9rem;
|
||||
min-width: 0;
|
||||
overflow-wrap: break-word;
|
||||
word-break: break-word;
|
||||
}
|
||||
|
||||
.draft-item .content a {
|
||||
color: var(--black);
|
||||
text-decoration: underline;
|
||||
text-underline-offset: 3px;
|
||||
}
|
||||
|
||||
.draft-item .content a:hover {
|
||||
color: var(--gray);
|
||||
}
|
||||
|
||||
.draft-item .content .sub {
|
||||
font-size: 0.8rem;
|
||||
color: var(--gray);
|
||||
margin-top: 0.25rem;
|
||||
}
|
||||
|
||||
.draft-item.superseded {
|
||||
background: var(--light);
|
||||
}
|
||||
|
||||
.draft-item.superseded .label {
|
||||
color: #aaa;
|
||||
}
|
||||
|
||||
.draft-item.superseded .content a {
|
||||
color: var(--gray);
|
||||
}
|
||||
|
||||
.status {
|
||||
margin-bottom: 2.5rem;
|
||||
}
|
||||
|
||||
.status-line {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: 0.75rem;
|
||||
padding: 0.6rem 0;
|
||||
border-bottom: 1px solid var(--border);
|
||||
font-size: 0.875rem;
|
||||
}
|
||||
|
||||
.status-line:first-child {
|
||||
border-top: 1px solid var(--border);
|
||||
}
|
||||
|
||||
.dot {
|
||||
width: 6px;
|
||||
height: 6px;
|
||||
border-radius: 50%;
|
||||
flex-shrink: 0;
|
||||
}
|
||||
|
||||
.dot.done { background: #22c55e; }
|
||||
.dot.active { background: #f59e0b; }
|
||||
.dot.pending { background: var(--border); }
|
||||
|
||||
.status-line .item { flex: 1; }
|
||||
.status-line .phase {
|
||||
font-size: 0.75rem;
|
||||
color: var(--gray);
|
||||
font-family: var(--mono);
|
||||
}
|
||||
|
||||
.contact {
|
||||
font-size: 0.875rem;
|
||||
color: var(--gray);
|
||||
}
|
||||
|
||||
.contact a {
|
||||
color: var(--black);
|
||||
text-decoration: underline;
|
||||
text-underline-offset: 3px;
|
||||
}
|
||||
|
||||
footer {
|
||||
border-top: 1px solid var(--border);
|
||||
padding: 1.25rem 2rem;
|
||||
font-size: 0.75rem;
|
||||
color: var(--gray);
|
||||
display: flex;
|
||||
justify-content: space-between;
|
||||
flex-wrap: wrap;
|
||||
gap: 0.5rem;
|
||||
}
|
||||
|
||||
footer a {
|
||||
color: var(--gray);
|
||||
text-decoration: underline;
|
||||
text-underline-offset: 3px;
|
||||
}
|
||||
|
||||
@media (max-width: 600px) {
|
||||
header { flex-direction: column; gap: 0.25rem; }
|
||||
h1 { font-size: 1.35rem; }
|
||||
main { padding: 2.5rem 1.25rem; }
|
||||
    .draft-item { flex-direction: column; gap: 0.5rem; }
|
||||
}
|
||||
</style>
|
||||
</head>
|
||||
<body>
|
||||
|
||||
<header>
|
||||
<a class="wordmark" href="/">api-index.org</a>
|
||||
<span class="tagline">open service registry for autonomous agents</span>
|
||||
</header>
|
||||
|
||||
<main>
|
||||
|
||||
<h1>The global discovery infrastructure<br>for autonomous agents.</h1>
|
||||
|
||||
<p class="lead">
|
||||
Autonomous agents cannot reliably find the services they need.
|
||||
The internet was built for humans — its discovery infrastructure
|
||||
assumes a human reading a screen. Agents are navigating it blind.
|
||||
</p>
|
||||
|
||||
<div class="problem">
|
||||
<p>
|
||||
The API Index is a single, globally queryable, machine-readable
|
||||
index of agent-consumable API services — with a structured trust model,
|
||||
capability-based search, and a stable entry point any agent can start from.
|
||||
</p>
|
||||
<p>
|
||||
Discovery is always free for consuming agents. The index is governed by
|
||||
the <a href="https://botstandards.org" target="_blank" rel="noopener noreferrer">Bot Standards Foundation</a>, a neutral
|
||||
non-profit Swiss Stiftung.
|
||||
</p>
|
||||
</div>
|
||||
|
||||
<h2>What the index provides</h2>
|
||||
|
||||
<div class="features">
|
||||
<div class="feature">
|
||||
<div class="label">Single entry point</div>
|
||||
<p>One stable global URL. Any agent navigates the full index
|
||||
from here via HATEOAS hypermedia links — no prior knowledge required.</p>
|
||||
</div>
|
||||
<div class="feature">
|
||||
<div class="label">Capability search</div>
|
||||
<p>Find services by what they do — not by name or URL.
|
||||
Structured taxonomy from <code>data.legal</code> to <code>nlp</code>
|
||||
to <code>iot</code> and beyond.</p>
|
||||
</div>
|
||||
<div class="feature">
|
||||
<div class="label">Three-dimensional trust model</div>
|
||||
<p>Verified organisation identity · Automated service verification ·
|
||||
Continuous liveness monitoring. Agents apply their own trust policy
|
||||
against verifiable metadata.</p>
|
||||
</div>
|
||||
<div class="feature">
|
||||
<div class="label">Open standard</div>
|
||||
<p>APIX Manifest (APM) supports OpenAPI, MCP, AsyncAPI, and
|
||||
GraphQL. Published under open licence. No proprietary formats.</p>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<h2>IETF Internet-Drafts</h2>
|
||||
|
||||
<div class="draft-suite">
|
||||
<div class="draft-item">
|
||||
<span class="label">Core</span>
|
||||
<div class="content">
|
||||
<a href="https://datatracker.ietf.org/doc/draft-rehfeld-apix-core/" target="_blank" rel="noopener noreferrer">
|
||||
draft-rehfeld-apix-core
|
||||
</a>
|
||||
<div class="sub">Core infrastructure, trust model, Index API, operator governance · April 2026</div>
|
||||
</div>
|
||||
</div>
|
||||
<div class="draft-item">
|
||||
<span class="label">Services</span>
|
||||
<div class="content">
|
||||
<a href="https://datatracker.ietf.org/doc/draft-rehfeld-apix-services/" target="_blank" rel="noopener noreferrer">
|
||||
draft-rehfeld-apix-services
|
||||
</a>
|
||||
<div class="sub">Web API and bot service registration profile, capability taxonomy, notification channels · April 2026</div>
|
||||
</div>
|
||||
</div>
|
||||
<div class="draft-item">
|
||||
<span class="label">IoT</span>
|
||||
<div class="content">
|
||||
<a href="https://datatracker.ietf.org/doc/draft-rehfeld-apix-iot/" target="_blank" rel="noopener noreferrer">
|
||||
draft-rehfeld-apix-iot
|
||||
</a>
|
||||
<div class="sub">IoT device class and instance registration, presence signalling, agent delegation · April 2026</div>
|
||||
</div>
|
||||
</div>
|
||||
<div class="draft-item superseded">
|
||||
<span class="label">Superseded</span>
|
||||
<div class="content">
|
||||
<a href="https://datatracker.ietf.org/doc/draft-rehfeld-bot-service-index/" target="_blank" rel="noopener noreferrer">
|
||||
draft-rehfeld-bot-service-index
|
||||
</a>
|
||||
<div class="sub">Supersession notice for prior revision · redirects to the three drafts above</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<h2>Current status</h2>
|
||||
|
||||
<div class="status">
|
||||
<div class="status-line">
|
||||
<span class="dot done"></span>
|
||||
<span class="item">Internet-Draft submitted to IETF</span>
|
||||
<span class="phase">Phase 0</span>
|
||||
</div>
|
||||
<div class="status-line">
|
||||
<span class="dot done"></span>
|
||||
<span class="item">IETF Dispatch posted · community engagement underway · May 2026</span>
|
||||
<span class="phase">Phase 0</span>
|
||||
</div>
|
||||
<div class="status-line">
|
||||
<span class="dot active"></span>
|
||||
<span class="item">Bot Standards Foundation incorporation underway</span>
|
||||
<span class="phase">Phase 1</span>
|
||||
</div>
|
||||
<div class="status-line">
|
||||
<span class="dot pending"></span>
|
||||
<span class="item">Founding member programme open</span>
|
||||
<span class="phase">Phase 2</span>
|
||||
</div>
|
||||
<div class="status-line">
|
||||
<span class="dot done"></span>
|
||||
<span class="item">Reference implementation live · public sandbox available</span>
|
||||
<span class="phase">Phase 3</span>
|
||||
</div>
|
||||
<div class="status-line">
|
||||
<span class="dot done"></span>
|
||||
<span class="item">Index live at api-index.org · May 2026</span>
|
||||
<span class="phase">Phase 3</span>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<h2>Try it</h2>
|
||||
|
||||
<div class="draft-suite" style="margin-bottom:2.5rem">
|
||||
<div class="draft-item">
|
||||
<span class="label">API root</span>
|
||||
<div class="content">
|
||||
<a href="https://api-index.org/" target="_blank" rel="noopener noreferrer">api-index.org/</a>
|
||||
<div class="sub">HATEOAS entry point — any agent or HTTP client can start here.</div>
|
||||
</div>
|
||||
</div>
|
||||
<div class="draft-item">
|
||||
<span class="label">Sandbox</span>
|
||||
<div class="content">
|
||||
<code>POST api-index.org/sandbox/register</code>
|
||||
<div class="sub">Register an isolated test namespace. No KYC. Returns a sandbox UUID and API key immediately. Name is a display label — not unique, not used for routing. Body: <code>{"name":"my-agent","contactEmail":"you@example.com"}</code></div>
|
||||
</div>
|
||||
</div>
|
||||
<div class="draft-item">
|
||||
<span class="label">OpenAPI</span>
|
||||
<div class="content">
|
||||
<a href="https://api-index.org/q/openapi" target="_blank" rel="noopener noreferrer">api-index.org/q/openapi</a>
|
||||
<div class="sub">Full OpenAPI specification for the registry API.</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<h2>How to integrate APIX into your agent</h2>
|
||||
|
||||
<div class="problem" style="margin-bottom:1.5rem">
|
||||
<p>
|
||||
APIX is designed so that a well-instructed agent can navigate it without
|
||||
any prior knowledge — starting from a single URL and following hypermedia
|
||||
links. The minimal system prompt addition is one sentence:
|
||||
</p>
|
||||
</div>
|
||||
|
||||
<div class="draft-suite" style="margin-bottom:1rem">
|
||||
<div class="draft-item" style="background:#f9f9f9">
|
||||
<span class="label">Prompt</span>
|
||||
<div class="content" style="font-family:'JetBrains Mono','Fira Code','Courier New',monospace;font-size:0.8rem;line-height:1.6;color:#1a1a1a">
|
||||
When you need an external service, first query the API Index at
|
||||
https://api-index.org/ — follow _links.servicesSearch with a
|
||||
?capability= parameter that describes what you need.
|
||||
If you use a sandbox, submit feedback at
|
||||
_links.sandbox + /feedback after each session.
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<p style="font-size:0.85rem;color:var(--gray);margin-bottom:2.5rem">
|
||||
That is the full integration for a consuming agent. No SDK, no API key, no registration required.
|
||||
The HATEOAS root tells the agent where everything is; the agent does not need to be told in advance.
|
||||
</p>
|
||||
|
||||
<h2>The Bot Service Manifest (BSM)</h2>
|
||||
|
||||
<p style="font-size:0.875rem;color:var(--gray);margin-bottom:1.25rem">
|
||||
Every registered service carries a structured manifest that agents read to decide
|
||||
whether to invoke a service. The fields are designed to be self-describing —
|
||||
the OpenAPI schema at <a href="https://api-index.org/q/openapi" style="color:var(--black)" target="_blank" rel="noopener noreferrer">api-index.org/q/openapi</a>
|
||||
contains full field descriptions that an agent can retrieve and reason over without
|
||||
being told what each field means in its system prompt.
|
||||
</p>
|
||||
|
||||
<div class="draft-suite" style="margin-bottom:1rem">
|
||||
<div class="draft-item">
|
||||
<span class="label">capabilities</span>
|
||||
<div class="content">
|
||||
<div class="sub" style="color:var(--black);font-size:0.875rem">Structured taxonomy strings — the primary search key. Examples: <code>nlp.translation</code>, <code>iot.telemetry</code>, <code>data.legal</code>.</div>
|
||||
</div>
|
||||
</div>
|
||||
<div class="draft-item">
|
||||
<span class="label">endpoint</span>
|
||||
<div class="content">
|
||||
<div class="sub" style="color:var(--black);font-size:0.875rem">The service URL an agent calls. Must be HTTPS. The liveness spider checks this continuously.</div>
|
||||
</div>
|
||||
</div>
|
||||
<div class="draft-item">
|
||||
<span class="label">openApiSpecUrl</span>
|
||||
<div class="content">
|
||||
<div class="sub" style="color:var(--black);font-size:0.875rem">URL of the OpenAPI specification. An agent fetches and parses this to learn the service's operations, parameters, and schemas — no human documentation required.</div>
|
||||
</div>
|
||||
</div>
|
||||
<div class="draft-item">
|
||||
<span class="label">mcpSpecUrl</span>
|
||||
<div class="content">
|
||||
<div class="sub" style="color:var(--black);font-size:0.875rem">URL of the MCP server manifest. Equivalent machine-readable contract for agents using the Model Context Protocol. Either field — or both — may be present.</div>
|
||||
</div>
|
||||
</div>
|
||||
<div class="draft-item">
|
||||
<span class="label">serviceStage</span>
|
||||
<div class="content">
|
||||
<div class="sub" style="color:var(--black);font-size:0.875rem"><code>PRODUCTION</code> · <code>BETA</code> · <code>EXPERIMENTAL</code> · <code>DEPRECATED</code> · <code>DECOMMISSIONED</code>. Agents should filter by stage based on the risk tolerance of their task.</div>
|
||||
</div>
|
||||
</div>
|
||||
<div class="draft-item">
|
||||
<span class="label">O-level</span>
|
||||
<div class="content">
|
||||
<div class="sub" style="color:var(--black);font-size:0.875rem">Operator verification tier: O-0 (unverified) through O-5 (independently audited). Agents apply their own trust policy against this score.</div>
|
||||
</div>
|
||||
</div>
|
||||
<div class="draft-item">
|
||||
<span class="label">extensions</span>
|
||||
<div class="content">
|
||||
<div class="sub" style="color:var(--black);font-size:0.875rem">Free-form key/value properties for information not covered by standard fields. Query with <code>?property=key:value</code>. Examples: <code>region:eu</code>, <code>dataResidency:DE</code>, <code>agentFramework:langchain</code>.</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<p style="font-size:0.85rem;color:var(--gray);margin-bottom:2.5rem">
|
||||
The <code>extensions</code> field exists for cases where the standard fields are not sufficient.
|
||||
If your agents consistently use extensions for information that should be a first-class field,
|
||||
that is a signal that the BSM schema needs to evolve.
|
||||
The feedback mechanism exists precisely to collect this signal.
|
||||
</p>
|
||||
|
||||
<h2>The feedback loop</h2>
|
||||
|
||||
<div class="problem" style="margin-bottom:1.5rem">
|
||||
<p>
|
||||
APIX is a living standard. The feedback endpoint at
|
||||
<code>/sandbox/{name}/feedback</code> lets agents rate their experience —
|
||||
discovery accuracy, liveness signal quality, error message clarity,
|
||||
and whether standard BSM fields were sufficient or custom extensions
|
||||
had to fill the gap.
|
||||
</p>
|
||||
<p style="margin-top:0.75rem">
|
||||
<strong>The design intent:</strong> a well-built agent navigates to the feedback schema
|
||||
at <code>_links.feedbackSchema</code> from the index root, reads the dimensions,
|
||||
and submits feedback autonomously after each sandbox session —
|
||||
without being explicitly told to do so.
|
||||
If your agent does not do this automatically, add the feedback URL
|
||||
to its instructions. Low scores on <code>extension_property_coverage</code>
|
||||
are the clearest signal that the BSM schema has a gap.
|
||||
</p>
|
||||
</div>
|
||||
|
||||
<div class="draft-suite" style="margin-bottom:2.5rem">
|
||||
<div class="draft-item">
|
||||
<span class="label">Schema</span>
|
||||
<div class="content">
|
||||
<a href="https://api-index.org/sandbox/feedback-schema" target="_blank" rel="noopener noreferrer">api-index.org/sandbox/feedback-schema</a>
|
||||
<div class="sub">Rated dimensions with question text and scale labels. Agents read this before submitting.</div>
|
||||
</div>
|
||||
</div>
|
||||
<div class="draft-item">
|
||||
<span class="label">Submit</span>
|
||||
<div class="content">
|
||||
<code>POST /sandbox/{name}/feedback</code>
|
||||
<div class="sub">No authentication required. Any agent that has used the sandbox can submit. JSON body: <code>{"scores": {"hateoas_navigation": 8, "discovery_accuracy": 7}}</code></div>
|
||||
</div>
|
||||
</div>
|
||||
<div class="draft-item">
|
||||
<span class="label">Prompt</span>
|
||||
<div class="content" style="font-family:'JetBrains Mono','Fira Code','Courier New',monospace;font-size:0.8rem;line-height:1.6;color:#1a1a1a">
|
||||
After using an APIX sandbox, fetch
|
||||
https://api-index.org/sandbox/feedback-schema,
|
||||
rate your experience on each dimension (0–10),
|
||||
and POST your scores to /sandbox/{name}/feedback.
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<p class="contact">
|
||||
Founding member enquiries and institutional partnerships:
|
||||
<a href="mailto:carsten@botstandards.org" target="_blank" rel="noopener noreferrer">carsten@botstandards.org</a>
|
||||
</p>
|
||||
</main>
|
||||
|
||||
<footer>
|
||||
<span>API Index · governed by the <a href="https://botstandards.org" target="_blank" rel="noopener noreferrer">Bot Standards Foundation</a></span>
|
||||
<span>IETF · <a href="https://datatracker.ietf.org/doc/draft-rehfeld-apix-core/" target="_blank" rel="noopener noreferrer">apix-core</a> · <a href="https://datatracker.ietf.org/doc/draft-rehfeld-apix-services/" target="_blank" rel="noopener noreferrer">apix-services</a> · <a href="https://datatracker.ietf.org/doc/draft-rehfeld-apix-iot/" target="_blank" rel="noopener noreferrer">apix-iot</a></span>
|
||||
</footer>
|
||||
|
||||
</body>
|
||||
</html>
|
||||
@@ -0,0 +1,7 @@
|
||||
quarkus.http.port=8081
|
||||
quarkus.smallrye-health.root-path=/q/health
|
||||
quarkus.log.level=${LOG_LEVEL:INFO}
|
||||
|
||||
quarkus.rest-client.registry.url=${APIX_REGISTRY_URL:https://api-index.org}
|
||||
quarkus.rest-client.registry.connect-timeout=3000
|
||||
quarkus.rest-client.registry.read-timeout=5000
|
||||
@@ -0,0 +1,295 @@
|
||||
<!DOCTYPE html>
|
||||
<html lang="en">
|
||||
<head>
|
||||
<meta charset="UTF-8">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1">
|
||||
<title>{dashboard.name} · APIX Sandbox</title>
|
||||
<style>
|
||||
* { box-sizing: border-box; margin: 0; padding: 0; }
|
||||
body {
|
||||
background: #0d1117;
|
||||
color: #c9d1d9;
|
||||
font-family: 'SF Mono', 'Consolas', 'Fira Code', monospace;
|
||||
font-size: 14px;
|
||||
line-height: 1.6;
|
||||
}
|
||||
a { color: #58a6ff; text-decoration: none; }
|
||||
a:hover { text-decoration: underline; }
|
||||
|
||||
/* ── Header ── */
|
||||
.header {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: 1rem;
|
||||
padding: 1rem 1.5rem;
|
||||
border-bottom: 1px solid #21262d;
|
||||
}
|
||||
.header-logo { color: #8b949e; font-size: 0.8rem; }
|
||||
.header-name { font-size: 1rem; color: #e6edf3; font-weight: 600; }
|
||||
.tier-badge {
|
||||
background: #161b22;
|
||||
border: 1px solid #30363d;
|
||||
color: #8b949e;
|
||||
padding: 0.1rem 0.5rem;
|
||||
border-radius: 12px;
|
||||
font-size: 0.7rem;
|
||||
letter-spacing: 0.05em;
|
||||
}
|
||||
|
||||
/* ── Map ── */
|
||||
#map-wrap {
|
||||
background: #060d18;
|
||||
position: relative;
|
||||
overflow: hidden;
|
||||
}
|
||||
#world-map {
|
||||
display: block;
|
||||
width: 100%;
|
||||
}
|
||||
.map-label {
|
||||
position: absolute;
|
||||
bottom: 0.6rem;
|
||||
right: 0.8rem;
|
||||
font-size: 0.65rem;
|
||||
color: #30363d;
|
||||
letter-spacing: 0.04em;
|
||||
}
|
||||
|
||||
/* ── Star ── */
|
||||
@keyframes star-glow {
|
||||
0%, 100% { filter: drop-shadow(0 0 3px #ffd700) drop-shadow(0 0 6px #ffd70066); }
|
||||
50% { filter: drop-shadow(0 0 8px #ffd700) drop-shadow(0 0 18px #ffd700aa) drop-shadow(0 0 32px #ffd70033); }
|
||||
}
|
||||
.registrar-star {
|
||||
font-size: 14px;
|
||||
fill: #ffd700;
|
||||
dominant-baseline: central;
|
||||
text-anchor: middle;
|
||||
animation: star-glow 2.4s ease-in-out infinite;
|
||||
cursor: default;
|
||||
}
|
||||
|
||||
/* ── Blinks ── */
|
||||
@keyframes blink-pulse {
|
||||
0% { opacity: 0; r: 2; }
|
||||
25% { opacity: 0.9; r: 5; }
|
||||
65% { opacity: 0.4; r: 4; }
|
||||
100% { opacity: 0; r: 2; }
|
||||
}
|
||||
.agent-blink {
|
||||
fill: #3d8bfd;
|
||||
opacity: 0;
|
||||
animation: blink-pulse 2.8s ease-in-out infinite;
|
||||
}
|
||||
|
||||
/* ── Stats ── */
|
||||
.stats-grid {
|
||||
display: grid;
|
||||
grid-template-columns: repeat(auto-fit, minmax(200px, 1fr));
|
||||
gap: 1px;
|
||||
background: #21262d;
|
||||
border-top: 1px solid #21262d;
|
||||
}
|
||||
.stat-card {
|
||||
background: #0d1117;
|
||||
padding: 1rem 1.5rem;
|
||||
}
|
||||
.stat-label {
|
||||
font-size: 0.7rem;
|
||||
color: #484f58;
|
||||
letter-spacing: 0.06em;
|
||||
text-transform: uppercase;
|
||||
margin-bottom: 0.3rem;
|
||||
}
|
||||
.stat-value {
|
||||
font-size: 1.4rem;
|
||||
color: #e6edf3;
|
||||
font-weight: 600;
|
||||
}
|
||||
.stat-sub {
|
||||
font-size: 0.7rem;
|
||||
color: #484f58;
|
||||
margin-top: 0.2rem;
|
||||
}
|
||||
|
||||
/* ── Footer ── */
|
||||
.footer {
|
||||
padding: 1rem 1.5rem;
|
||||
border-top: 1px solid #21262d;
|
||||
font-size: 0.75rem;
|
||||
color: #484f58;
|
||||
display: flex;
|
||||
justify-content: space-between;
|
||||
align-items: center;
|
||||
flex-wrap: wrap;
|
||||
gap: 0.5rem;
|
||||
}
|
||||
</style>
|
||||
</head>
|
||||
<body>
|
||||
|
||||
<div class="header">
|
||||
<span class="header-logo"><a href="/">APIX</a></span>
|
||||
<span class="header-name">{dashboard.name}</span>
|
||||
<span class="tier-badge">{dashboard.tier}</span>
|
||||
</div>
|
||||
|
||||
<div id="map-wrap">
|
||||
<svg id="world-map"></svg>
|
||||
<div class="map-label">agent interaction map</div>
|
||||
</div>
|
||||
|
||||
<div class="stats-grid">
|
||||
<div class="stat-card">
|
||||
<div class="stat-label">Services registered</div>
|
||||
<div class="stat-value">{#if dashboard.usage.get('SERVICE_REGISTERED') != null}{dashboard.usage.get('SERVICE_REGISTERED')}{#else}0{/if}</div>
|
||||
<div class="stat-sub">of {#if dashboard.maxServices != null}{dashboard.maxServices}{#else}∞{/if} allowed</div>
|
||||
</div>
|
||||
<div class="stat-card">
|
||||
<div class="stat-label">Capability searches</div>
|
||||
<div class="stat-value">{#if dashboard.usage.get('SERVICE_SEARCHED') != null}{dashboard.usage.get('SERVICE_SEARCHED')}{#else}0{/if}</div>
|
||||
<div class="stat-sub">agent discovery calls</div>
|
||||
</div>
|
||||
<div class="stat-card">
|
||||
<div class="stat-label">Service list calls</div>
|
||||
<div class="stat-value">{#if dashboard.usage.get('SERVICE_LISTED') != null}{dashboard.usage.get('SERVICE_LISTED')}{#else}0{/if}</div>
|
||||
<div class="stat-sub">full list requests</div>
|
||||
</div>
|
||||
<div class="stat-card">
|
||||
<div class="stat-label">Rate limit</div>
|
||||
<div class="stat-value">{dashboard.ratePerMinute}</div>
|
||||
<div class="stat-sub">requests / minute</div>
|
||||
</div>
|
||||
<div class="stat-card">
|
||||
<div class="stat-label">Sandbox expires</div>
|
||||
<div class="stat-value" id="expires-val">—</div>
|
||||
<div class="stat-sub" id="expires-sub">{dashboard.expiresAt}</div>
|
||||
</div>
|
||||
{#if dashboard.registrarLocation != null}
|
||||
<div class="stat-card">
|
||||
<div class="stat-label">Registered from</div>
|
||||
<div class="stat-value" style="font-size:1rem;">{dashboard.registrarLocation}</div>
|
||||
<div class="stat-sub">owner-declared location</div>
|
||||
</div>
|
||||
{/if}
|
||||
</div>
|
||||
|
||||
<div class="footer">
|
||||
<span>sandbox id: {dashboard.sandboxId}</span>
|
||||
<span><a href="https://api-index.org">api-index.org</a> · APIX Registry</span>
|
||||
</div>
|
||||
|
||||
<!-- Data injected by portal (Jackson-serialised, </script> escaped) -->
|
||||
<script>
|
||||
var __D = {dataJson.raw};
|
||||
</script>
|
||||
|
||||
<script src="https://cdn.jsdelivr.net/npm/d3@7/dist/d3.min.js"></script>
|
||||
<script src="https://cdn.jsdelivr.net/npm/topojson-client@3/dist/topojson-client.min.js"></script>
|
||||
{#raw}
|
||||
<script>
|
||||
(function () {
|
||||
var data = window.__D || {};
|
||||
var visits = data.recentVisits || [];
|
||||
var hasRegistrar = typeof data.registrarLat === 'number' && typeof data.registrarLon === 'number';
|
||||
|
||||
// ── Expires display ──────────────────────────────────────────────────────
|
||||
var expiresEl = document.getElementById('expires-val');
|
||||
var expiresSub = document.getElementById('expires-sub');
|
||||
if (data.expiresAt && expiresEl) {
|
||||
var d = new Date(data.expiresAt);
|
||||
expiresEl.textContent = d.toLocaleDateString('en-GB', { day: 'numeric', month: 'short', year: 'numeric' });
|
||||
var days = Math.ceil((d - Date.now()) / 86400000);
|
||||
expiresSub.textContent = days > 0 ? days + ' days remaining' : 'expired';
|
||||
}
|
||||
|
||||
// ── Map ──────────────────────────────────────────────────────────────────
|
||||
var wrap = document.getElementById('map-wrap');
|
||||
var svg = d3.select('#world-map');
|
||||
var W = wrap.clientWidth || 800;
|
||||
var H = Math.round(W * 0.48);
|
||||
svg.attr('viewBox', '0 0 ' + W + ' ' + H)
|
||||
.attr('width', W)
|
||||
.attr('height', H);
|
||||
|
||||
var projection = d3.geoNaturalEarth1()
|
||||
.scale(W / 6.3)
|
||||
.translate([W / 2, H / 2]);
|
||||
var path = d3.geoPath(projection);
|
||||
|
||||
// Ocean
|
||||
svg.append('rect')
|
||||
.attr('width', W).attr('height', H)
|
||||
.attr('fill', '#060d18');
|
||||
|
||||
fetch('https://cdn.jsdelivr.net/npm/world-atlas@2/countries-110m.json')
|
||||
.then(function(r) { return r.json(); })
|
||||
.then(function(world) {
|
||||
// Land
|
||||
svg.append('path')
|
||||
.datum(topojson.feature(world, world.objects.countries))
|
||||
.attr('d', path)
|
||||
.attr('fill', '#111820')
|
||||
.attr('stroke', '#1e2a38')
|
||||
.attr('stroke-width', 0.5);
|
||||
|
||||
// Graticule (faint grid)
|
||||
svg.append('path')
|
||||
.datum(d3.geoGraticule()())
|
||||
.attr('d', path)
|
||||
.attr('fill', 'none')
|
||||
.attr('stroke', '#0d1a28')
|
||||
.attr('stroke-width', 0.4);
|
||||
|
||||
// Agent blinks — resolve duplicates by approximate cell
|
||||
var cellSize = 1.5; // degrees
|
||||
var cells = {};
|
||||
visits.forEach(function(v) {
|
||||
var cell = Math.round(v.lat / cellSize) + ',' + Math.round(v.lon / cellSize);
|
||||
if (!cells[cell]) cells[cell] = { lat: v.lat, lon: v.lon, count: 0 };
|
||||
cells[cell].count++;
|
||||
});
|
||||
var points = Object.values(cells);
|
||||
points.forEach(function(pt, i) {
|
||||
var pos = projection([pt.lon, pt.lat]);
|
||||
if (!pos) return;
|
||||
var r = Math.min(2 + Math.log1p(pt.count) * 1.5, 8);
|
||||
svg.append('circle')
|
||||
.attr('class', 'agent-blink')
|
||||
.attr('cx', pos[0])
|
||||
.attr('cy', pos[1])
|
||||
.attr('r', r)
|
||||
.style('animation-delay', (i * 190 % 5000) + 'ms');
|
||||
});
|
||||
|
||||
// Registrar star — drawn last so it sits on top
|
||||
if (hasRegistrar) {
|
||||
var pos = projection([data.registrarLon, data.registrarLat]);
|
||||
if (pos) {
|
||||
svg.append('text')
|
||||
.attr('class', 'registrar-star')
|
||||
.attr('x', pos[0])
|
||||
.attr('y', pos[1])
|
||||
.text('★');
|
||||
}
|
||||
}
|
||||
})
|
||||
.catch(function(e) {
|
||||
console.warn('Map load failed:', e);
|
||||
});
|
||||
|
||||
// ── Resize ────────────────────────────────────────────────────────────────
|
||||
window.addEventListener('resize', function() {
|
||||
W = wrap.clientWidth || 800;
|
||||
H = Math.round(W * 0.48);
|
||||
svg.attr('viewBox', '0 0 ' + W + ' ' + H)
|
||||
.attr('width', W).attr('height', H);
|
||||
projection.scale(W / 6.3).translate([W / 2, H / 2]);
|
||||
svg.selectAll('path').attr('d', path);
|
||||
});
|
||||
})();
|
||||
</script>
|
||||
{/raw}
|
||||
|
||||
</body>
|
||||
</html>
|
||||
@@ -0,0 +1,24 @@
|
||||
<!DOCTYPE html>
|
||||
<html lang="en">
|
||||
<head>
|
||||
<meta charset="UTF-8">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1">
|
||||
<title>Error · APIX</title>
|
||||
<style>
|
||||
* { box-sizing: border-box; margin: 0; padding: 0; }
|
||||
body { background: #0d1117; color: #c9d1d9; font-family: 'SF Mono', 'Consolas', monospace; display: flex; align-items: center; justify-content: center; min-height: 100vh; }
|
||||
.container { text-align: center; max-width: 480px; padding: 2rem; }
|
||||
h1 { font-size: 1.2rem; color: #8b949e; margin-bottom: 1rem; font-weight: 400; }
|
||||
p { font-size: 0.9rem; color: #484f58; margin-bottom: 2rem; }
|
||||
a { color: #58a6ff; text-decoration: none; font-size: 0.85rem; }
|
||||
a:hover { text-decoration: underline; }
|
||||
</style>
|
||||
</head>
|
||||
<body>
|
||||
<div class="container">
|
||||
<h1>{message}</h1>
|
||||
<p>Could not load dashboard for sandbox {uuid}.</p>
|
||||
<a href="/">← back</a>
|
||||
</div>
|
||||
</body>
|
||||
</html>
|
||||
@@ -0,0 +1,25 @@
|
||||
<!DOCTYPE html>
|
||||
<html lang="en">
|
||||
<head>
|
||||
<meta charset="UTF-8">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1">
|
||||
<title>Sandbox not found · APIX</title>
|
||||
<style>
|
||||
* { box-sizing: border-box; margin: 0; padding: 0; }
|
||||
body { background: #0d1117; color: #c9d1d9; font-family: 'SF Mono', 'Consolas', monospace; display: flex; align-items: center; justify-content: center; min-height: 100vh; }
|
||||
.container { text-align: center; max-width: 480px; padding: 2rem; }
|
||||
h1 { font-size: 1.2rem; color: #8b949e; margin-bottom: 1rem; font-weight: 400; }
|
||||
p { font-size: 0.9rem; color: #484f58; margin-bottom: 2rem; line-height: 1.6; }
|
||||
a { color: #58a6ff; text-decoration: none; font-size: 0.85rem; }
|
||||
a:hover { text-decoration: underline; }
|
||||
code { background: #161b22; padding: 0.15em 0.4em; border-radius: 3px; font-size: 0.8rem; color: #8b949e; }
|
||||
</style>
|
||||
</head>
|
||||
<body>
|
||||
<div class="container">
|
||||
<h1>Sandbox not found</h1>
|
||||
<p>No sandbox exists for <code>{uuid}</code>.<br>The sandbox may have expired or the UUID is incorrect.</p>
|
||||
<a href="/">← back to api-index.org</a>
|
||||
</div>
|
||||
</body>
|
||||
</html>
|
||||
+5
-5
@@ -81,12 +81,12 @@ public class IotTransitionSteps {
|
||||
currentServiceId = id;
|
||||
}
|
||||
|
||||
private static String futureSunsetAt(int days) {
|
||||
return Instant.now().plus(Duration.ofDays(days)).toString();
|
||||
private String futureSunsetAt(int days) {
|
||||
return Arc.container().instance(ClockService.class).get().now().plus(Duration.ofDays(days)).toString();
|
||||
}
|
||||
|
||||
private static String pastSunsetAt(int days) {
|
||||
return Instant.now().minus(Duration.ofDays(days)).toString();
|
||||
private String pastSunsetAt(int days) {
|
||||
return Arc.container().instance(ClockService.class).get().now().minus(Duration.ofDays(days)).toString();
|
||||
}
|
||||
|
||||
// ── Given — service creation ──────────────────────────────────────────────
|
||||
@@ -239,7 +239,7 @@ public class IotTransitionSteps {
|
||||
// moment so the decommission validation ("sunset_at has not passed") succeeds.
|
||||
// Truncate to micros: Postgres timestamptz stores at microsecond precision and
|
||||
// may round sub-microsecond values, causing clock != stored sunsetAt.
|
||||
Instant sunsetAt = Instant.now().plus(Duration.ofDays(1)).truncatedTo(ChronoUnit.MICROS);
|
||||
Instant sunsetAt = Arc.container().instance(ClockService.class).get().now().plus(Duration.ofDays(1)).truncatedTo(ChronoUnit.MICROS);
|
||||
asTemplateOwner()
|
||||
.body(Map.of("sunsetAt", sunsetAt.toString()))
|
||||
.patch("/services/" + currentServiceId)
|
||||
|
||||
+18
@@ -607,12 +607,30 @@ public class OrgOnboardingSteps {
|
||||
clock.advance(clock.now().plus(Duration.ofHours(hours)));
|
||||
}
|
||||
|
||||
@When("time advances by {int} hours and {long} nanosecond(s)")
|
||||
public void timeAdvancesHoursAndNanoseconds(int hours, long nanoseconds) {
|
||||
ClockService clock = Arc.container().instance(ClockService.class).get();
|
||||
clock.advance(clock.now().plus(Duration.ofHours(hours)).plusNanos(nanoseconds));
|
||||
}
|
||||
|
||||
@When("time advances by {int} minutes")
|
||||
public void timeAdvancesMinutes(int minutes) {
|
||||
ClockService clock = Arc.container().instance(ClockService.class).get();
|
||||
clock.advance(clock.now().plus(Duration.ofMinutes(minutes)));
|
||||
}
|
||||
|
||||
@When("time advances by 1 nanosecond short of {int} hours")
|
||||
public void timeAdvancesHoursMinusOneNano(int hours) {
|
||||
ClockService clock = Arc.container().instance(ClockService.class).get();
|
||||
clock.advance(clock.now().plus(Duration.ofHours(hours)).minusNanos(1));
|
||||
}
|
||||
|
||||
@When("time advances by 1 nanosecond short of {int} minutes")
|
||||
public void timeAdvancesMinutesMinusOneNano(int minutes) {
|
||||
ClockService clock = Arc.container().instance(ClockService.class).get();
|
||||
clock.advance(clock.now().plus(Duration.ofMinutes(minutes)).minusNanos(1));
|
||||
}
|
||||
|
||||
// ── When: GET org ─────────────────────────────────────────────────────────
|
||||
|
||||
@When("the caller reads the organisation")
|
||||
|
||||
+23
@@ -0,0 +1,23 @@
|
||||
package org.botstandards.apix.registry.bdd;
|
||||
|
||||
import io.cucumber.core.cli.Main;
|
||||
import io.quarkus.test.junit.QuarkusTest;
|
||||
import org.junit.jupiter.api.Test;
|
||||
|
||||
import static org.junit.jupiter.api.Assertions.assertEquals;
|
||||
|
||||
@QuarkusTest
|
||||
public class SandboxCucumberIT {
|
||||
|
||||
@Test
|
||||
public void run() {
|
||||
byte exitCode = Main.run(
|
||||
"--glue", "org.botstandards.apix.registry.bdd",
|
||||
"--plugin", "pretty",
|
||||
"--plugin", "json:target/cucumber-report-sandbox.json",
|
||||
"--plugin", "io.qameta.allure.cucumber7jvm.AllureCucumber7Jvm",
|
||||
"classpath:features/sandbox"
|
||||
);
|
||||
assertEquals(0, exitCode, "One or more sandbox Cucumber scenarios failed — check test output for details");
|
||||
}
|
||||
}
|
||||
+556
@@ -0,0 +1,556 @@
|
||||
package org.botstandards.apix.registry.bdd;
|
||||
|
||||
import io.cucumber.java.en.Given;
|
||||
import io.cucumber.java.en.Then;
|
||||
import io.cucumber.java.en.When;
|
||||
import io.restassured.response.Response;
|
||||
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
import static io.restassured.RestAssured.given;
|
||||
import static io.restassured.http.ContentType.JSON;
|
||||
import static org.assertj.core.api.Assertions.assertThat;
|
||||
import static org.hamcrest.Matchers.*;
|
||||
|
||||
/**
|
||||
* BDD step definitions for sandbox self-service features.
|
||||
*
|
||||
* Cucumber creates a fresh instance per scenario — instance fields are scenario-scoped.
|
||||
*/
|
||||
public class SandboxSteps {
|
||||
|
||||
private static final String SANDBOX_API_KEY_HEADER = "X-Api-Key";
|
||||
private static final String ADMIN_API_KEY_HEADER = "X-Api-Key";
|
||||
private static final String ADMIN_API_KEY = "test-api-key";
|
||||
|
||||
// ── Per-scenario state ────────────────────────────────────────────────────
|
||||
|
||||
private Response lastResponse;
|
||||
/** API key for the most recently created/resolved sandbox. */
|
||||
private String currentSandboxKey;
|
||||
/** Name of the most recently created sandbox. */
|
||||
private String currentSandboxName;
|
||||
/** Keys indexed by sandbox name for multi-sandbox scenarios. */
|
||||
private final Map<String, String> sandboxKeys = new HashMap<>();
|
||||
|
||||
// ── Given — sandbox creation ──────────────────────────────────────────────
|
||||
|
||||
@Given("a sandbox named {string} exists")
|
||||
public void aSandboxNamedExists(String name) {
|
||||
createSandbox(name, "test+" + name + "@example.com");
|
||||
}
|
||||
|
||||
@Given("a production service {string} with endpoint {string} is registered")
|
||||
public void aProductionServiceIsRegistered(String serviceName, String endpoint) {
|
||||
Map<String, Object> payload = buildServicePayload(serviceName, endpoint, "device.telemetry");
|
||||
given()
|
||||
.contentType(JSON)
|
||||
.header(ADMIN_API_KEY_HEADER, ADMIN_API_KEY)
|
||||
.body(payload)
|
||||
.when()
|
||||
.post("/services")
|
||||
.then()
|
||||
.statusCode(201);
|
||||
}
|
||||
|
||||
@Given("a sandbox service with endpoint {string} and capability {string} is registered in {string}")
|
||||
public void aSandboxServiceIsRegistered(String endpoint, String capability, String sandboxName) {
|
||||
String key = resolveKey(sandboxName);
|
||||
Map<String, Object> payload = buildServicePayload("SandboxService-" + endpoint.hashCode(), endpoint, capability);
|
||||
given()
|
||||
.contentType(JSON)
|
||||
.header(SANDBOX_API_KEY_HEADER, key)
|
||||
.body(payload)
|
||||
.when()
|
||||
.post("/sandbox/" + sandboxName + "/services")
|
||||
.then()
|
||||
.statusCode(201);
|
||||
}
|
||||
|
||||
@Given("a sandbox service with endpoint {string} capability {string} and extension {string} is registered in {string}")
|
||||
public void aSandboxServiceWithExtensionIsRegistered(String endpoint, String capability,
|
||||
String extension, String sandboxName) {
|
||||
String key = resolveKey(sandboxName);
|
||||
int colon = extension.indexOf(':');
|
||||
Map<String, Object> extensions = colon > 0
|
||||
? Map.of(extension.substring(0, colon), extension.substring(colon + 1))
|
||||
: Map.of();
|
||||
Map<String, Object> payload = buildServicePayload("SandboxService-" + endpoint.hashCode(), endpoint, capability);
|
||||
payload.put("extensions", extensions);
|
||||
given()
|
||||
.contentType(JSON)
|
||||
.header(SANDBOX_API_KEY_HEADER, key)
|
||||
.body(payload)
|
||||
.when()
|
||||
.post("/sandbox/" + sandboxName + "/services")
|
||||
.then()
|
||||
.statusCode(201);
|
||||
}
|
||||
|
||||
@Given("the sandbox root for {string} has been viewed once")
|
||||
public void sandboxRootHasBeenViewedOnce(String sandboxName) {
|
||||
given().get("/sandbox/" + sandboxName).then().statusCode(200);
|
||||
}
|
||||
|
||||
@Given("the sandbox service list for {string} has been requested once")
|
||||
public void sandboxServiceListHasBeenRequestedOnce(String sandboxName) {
|
||||
given().get("/sandbox/" + sandboxName + "/services").then().statusCode(200);
|
||||
}
|
||||
|
||||
@Given("feedback has been submitted to {string} with scores {word}={int} {word}={int}")
|
||||
public void feedbackSubmittedTwoScores(String sandboxName, String dim1, int score1, String dim2, int score2) {
|
||||
submitFeedback(sandboxName, Map.of(dim1, score1, dim2, score2), null, null);
|
||||
}
|
||||
|
||||
@Given("feedback has been submitted to {string} with scores {word}={int} and model {string} provider {string}")
|
||||
public void feedbackSubmittedWithModel(String sandboxName, String dim, int score, String model, String provider) {
|
||||
submitFeedback(sandboxName, Map.of(dim, score), model, provider);
|
||||
}
|
||||
|
||||
@Given("{int} services have been registered in sandbox {string}")
|
||||
public void nServicesRegisteredInSandbox(int count, String sandboxName) {
|
||||
String key = resolveKey(sandboxName);
|
||||
for (int i = 0; i < count; i++) {
|
||||
Map<String, Object> payload = buildServicePayload(
|
||||
"CapService-" + i, "https://cap-" + i + ".example.com", "cap.test");
|
||||
given()
|
||||
.contentType(JSON)
|
||||
.header(SANDBOX_API_KEY_HEADER, key)
|
||||
.body(payload)
|
||||
.when()
|
||||
.post("/sandbox/" + sandboxName + "/services")
|
||||
.then()
|
||||
.statusCode(201);
|
||||
}
|
||||
}
|
||||
|
||||
// ── When — registration ───────────────────────────────────────────────────
|
||||
|
||||
@When("an agent registers a sandbox named {string} with email {string}")
|
||||
public void agentRegistersSandbox(String name, String email) {
|
||||
lastResponse = given()
|
||||
.contentType(JSON)
|
||||
.body(Map.of("name", name, "contactEmail", email))
|
||||
.when()
|
||||
.post("/sandbox/register")
|
||||
.andReturn();
|
||||
|
||||
if (lastResponse.statusCode() == 201) {
|
||||
currentSandboxKey = lastResponse.jsonPath().getString("apiKey");
|
||||
currentSandboxName = name;
|
||||
sandboxKeys.put(name, currentSandboxKey);
|
||||
}
|
||||
}
|
||||
|
||||
// ── When — navigation ─────────────────────────────────────────────────────
|
||||
|
||||
@When("the root resource is requested without an API key")
|
||||
public void rootRequestedWithoutKey() {
|
||||
lastResponse = given().get("/").andReturn();
|
||||
}
|
||||
|
||||
@When("the root resource is requested with the sandbox API key for {string}")
|
||||
public void rootRequestedWithSandboxKey(String sandboxName) {
|
||||
String key = resolveKey(sandboxName);
|
||||
lastResponse = given()
|
||||
.header(SANDBOX_API_KEY_HEADER, key)
|
||||
.when()
|
||||
.get("/")
|
||||
.andReturn();
|
||||
}
|
||||
|
||||
@When("the root resource is requested with API key {string}")
|
||||
public void rootRequestedWithLiteralKey(String key) {
|
||||
lastResponse = given()
|
||||
.header(SANDBOX_API_KEY_HEADER, key)
|
||||
.when()
|
||||
.get("/")
|
||||
.andReturn();
|
||||
}
|
||||
|
||||
@When("the sandbox root for {string} is requested")
|
||||
public void sandboxRootRequested(String sandboxName) {
|
||||
lastResponse = given().get("/sandbox/" + sandboxName).andReturn();
|
||||
}
|
||||
|
||||
// ── When — service operations ─────────────────────────────────────────────
|
||||
|
||||
@When("GET /services is called without authentication")
|
||||
public void getServicesWithoutAuth() {
|
||||
lastResponse = given().get("/services").andReturn();
|
||||
}
|
||||
|
||||
@When("the sandbox service list for {string} is requested")
|
||||
public void sandboxServiceListRequested(String sandboxName) {
|
||||
lastResponse = given().get("/sandbox/" + sandboxName + "/services").andReturn();
|
||||
}
|
||||
|
||||
@When("a service is registered in sandbox {string} without an API key")
|
||||
public void serviceRegisteredInSandboxWithoutKey(String sandboxName) {
|
||||
Map<String, Object> payload = buildServicePayload("NoKeyService", "https://nokey.example.com", "test.cap");
|
||||
lastResponse = given()
|
||||
.contentType(JSON)
|
||||
.body(payload)
|
||||
.when()
|
||||
.post("/sandbox/" + sandboxName + "/services")
|
||||
.andReturn();
|
||||
}
|
||||
|
||||
@When("a service is registered in sandbox {string} with API key {string}")
|
||||
public void serviceRegisteredInSandboxWithLiteralKey(String sandboxName, String key) {
|
||||
Map<String, Object> payload = buildServicePayload("WrongKeyService", "https://wrongkey.example.com", "test.cap");
|
||||
lastResponse = given()
|
||||
.contentType(JSON)
|
||||
.header(SANDBOX_API_KEY_HEADER, key)
|
||||
.body(payload)
|
||||
.when()
|
||||
.post("/sandbox/" + sandboxName + "/services")
|
||||
.andReturn();
|
||||
}
|
||||
|
||||
@When("a service is registered in sandbox {string} with the sandbox API key")
|
||||
public void serviceRegisteredInSandboxWithKey(String sandboxName) {
|
||||
String key = resolveKey(sandboxName);
|
||||
Map<String, Object> payload = buildServicePayload("ExtraService", "https://extra.example.com", "cap.test");
|
||||
lastResponse = given()
|
||||
.contentType(JSON)
|
||||
.header(SANDBOX_API_KEY_HEADER, key)
|
||||
.body(payload)
|
||||
.when()
|
||||
.post("/sandbox/" + sandboxName + "/services")
|
||||
.andReturn();
|
||||
}
|
||||
|
||||
@When("sandbox {string} services are searched by capability {string}")
|
||||
public void sandboxServiceSearchCalled(String sandboxName, String capability) {
|
||||
lastResponse = given()
|
||||
.get("/sandbox/" + sandboxName + "/services?capability=" + capability)
|
||||
.andReturn();
|
||||
}
|
||||
|
||||
@When("sandbox {string} services are searched by capability {string} and property {string}")
|
||||
public void sandboxServiceSearchCalledWithProperty(String sandboxName, String capability, String property) {
|
||||
lastResponse = given()
|
||||
.get("/sandbox/" + sandboxName + "/services?capability=" + capability + "&property=" + property)
|
||||
.andReturn();
|
||||
}
|
||||
|
||||
// ── When — telemetry ──────────────────────────────────────────────────────
|
||||
|
||||
@When("the telemetry for {string} is requested with the sandbox API key")
|
||||
public void telemetryRequestedWithKey(String sandboxName) {
|
||||
String key = resolveKey(sandboxName);
|
||||
lastResponse = given()
|
||||
.header(SANDBOX_API_KEY_HEADER, key)
|
||||
.when()
|
||||
.get("/sandbox/" + sandboxName + "/telemetry")
|
||||
.andReturn();
|
||||
}
|
||||
|
||||
@When("the telemetry for {string} is requested without an API key")
|
||||
public void telemetryRequestedWithoutKey(String sandboxName) {
|
||||
lastResponse = given().get("/sandbox/" + sandboxName + "/telemetry").andReturn();
|
||||
}
|
||||
|
||||
@When("the telemetry for {string} is requested with API key {string}")
|
||||
public void telemetryRequestedWithLiteralKey(String sandboxName, String key) {
|
||||
lastResponse = given()
|
||||
.header(SANDBOX_API_KEY_HEADER, key)
|
||||
.when()
|
||||
.get("/sandbox/" + sandboxName + "/telemetry")
|
||||
.andReturn();
|
||||
}
|
||||
|
||||
// ── When — feedback ───────────────────────────────────────────────────────
|
||||
|
||||
@When("GET /sandbox/feedback-schema is called without authentication")
|
||||
public void getFeedbackSchema() {
|
||||
lastResponse = given().get("/sandbox/feedback-schema").andReturn();
|
||||
}
|
||||
|
||||
@When("feedback is submitted to {string} with scores {word}={int} {word}={int}")
|
||||
public void feedbackSubmittedWhenTwoScores(String sandboxName, String dim1, int score1, String dim2, int score2) {
|
||||
lastResponse = submitFeedbackResponse(sandboxName, Map.of(dim1, score1, dim2, score2), null, null);
|
||||
}
|
||||
|
||||
@When("feedback is submitted to {string} with scores {word}={int} and model {string} provider {string}")
|
||||
public void feedbackSubmittedWhenWithModel(String sandboxName, String dim, int score, String model, String provider) {
|
||||
lastResponse = submitFeedbackResponse(sandboxName, Map.of(dim, score), model, provider);
|
||||
}
|
||||
|
||||
@When("feedback is submitted to {string} with scores {word}={int}")
|
||||
public void feedbackSubmittedWhenOneScore(String sandboxName, String dim, int score) {
|
||||
lastResponse = submitFeedbackResponse(sandboxName, Map.of(dim, score), null, null);
|
||||
}
|
||||
|
||||
@When("feedback is submitted to {string} with empty scores")
|
||||
public void feedbackSubmittedWithEmptyScores(String sandboxName) {
|
||||
lastResponse = given()
|
||||
.contentType(JSON)
|
||||
.body(Map.of("scores", Map.of()))
|
||||
.when()
|
||||
.post("/sandbox/" + sandboxName + "/feedback")
|
||||
.andReturn();
|
||||
}
|
||||
|
||||
@When("the feedback aggregate for {string} is requested with the sandbox API key")
|
||||
public void feedbackAggregateWithKey(String sandboxName) {
|
||||
String key = resolveKey(sandboxName);
|
||||
lastResponse = given()
|
||||
.header(SANDBOX_API_KEY_HEADER, key)
|
||||
.when()
|
||||
.get("/sandbox/" + sandboxName + "/feedback")
|
||||
.andReturn();
|
||||
}
|
||||
|
||||
@When("the feedback aggregate for {string} is requested without an API key")
|
||||
public void feedbackAggregateWithoutKey(String sandboxName) {
|
||||
lastResponse = given().get("/sandbox/" + sandboxName + "/feedback").andReturn();
|
||||
}
|
||||
|
||||
// ── Then — HTTP status ────────────────────────────────────────────────────
|
||||
|
||||
@Then("the response code is {int}")
|
||||
public void responseCodeIs(int status) {
|
||||
assertThat(lastResponse.statusCode()).as("HTTP status").isEqualTo(status);
|
||||
}
|
||||
|
||||
// ── Then — registration assertions ────────────────────────────────────────
|
||||
|
||||
@Then("the response contains a sandbox id")
|
||||
public void responseContainsSandboxId() {
|
||||
assertThat(lastResponse.jsonPath().getString("sandboxId")).isNotBlank();
|
||||
}
|
||||
|
||||
@Then("the response contains an API key with prefix {string}")
|
||||
public void responseContainsApiKeyWithPrefix(String prefix) {
|
||||
assertThat(lastResponse.jsonPath().getString("apiKey")).startsWith(prefix);
|
||||
}
|
||||
|
||||
@Then("the response contains tier {string}")
|
||||
public void responseContainsTier(String tier) {
|
||||
assertThat(lastResponse.jsonPath().getString("tier")).isEqualTo(tier);
|
||||
}
|
||||
|
||||
@Then("the response contains a non-null expiresAt")
|
||||
public void responseContainsNonNullExpiresAt() {
|
||||
assertThat(lastResponse.jsonPath().getString("expiresAt")).isNotBlank();
|
||||
}
|
||||
|
||||
@Then("the response contains _links.self ending with {string}")
|
||||
public void responseContainsLinksSelfEndingWith(String suffix) {
|
||||
String href = lastResponse.jsonPath().getString("_links.self.href");
|
||||
assertThat(href).as("_links.self.href").endsWith(suffix);
|
||||
}
|
||||
|
||||
@Then("the response contains _links.services")
|
||||
public void responseContainsLinksServices() {
|
||||
assertThat(lastResponse.jsonPath().getString("_links.services.href")).isNotBlank();
|
||||
}
|
||||
|
||||
// ── Then — navigation assertions ──────────────────────────────────────────
|
||||
|
||||
@Then("the response contains _links.registerSandbox")
|
||||
public void responseContainsLinksRegisterSandbox() {
|
||||
assertThat(lastResponse.jsonPath().getString("_links.registerSandbox.href")).isNotBlank();
|
||||
}
|
||||
|
||||
@Then("the response contains _links.feedbackSchema")
|
||||
public void responseContainsLinksFeedbackSchema() {
|
||||
assertThat(lastResponse.jsonPath().getString("_links.feedbackSchema.href")).isNotBlank();
|
||||
}
|
||||
|
||||
@Then("the response does not contain _links.sandbox")
|
||||
public void responseDoesNotContainLinksSandbox() {
|
||||
assertThat(lastResponse.jsonPath().getString("_links.sandbox")).isNull();
|
||||
}
|
||||
|
||||
@Then("the response contains _links.sandbox ending with {string}")
|
||||
public void responseContainsLinksSandboxEndingWith(String suffix) {
|
||||
String href = lastResponse.jsonPath().getString("_links.sandbox.href");
|
||||
assertThat(href).as("_links.sandbox.href").endsWith(suffix);
|
||||
}
|
||||
|
||||
@Then("the response contains sandbox name {string}")
|
||||
public void responseContainsSandboxName(String name) {
|
||||
assertThat(lastResponse.jsonPath().getString("name")).isEqualTo(name);
|
||||
}
|
||||
|
||||
@Then("the response contains _links.submitFeedback")
|
||||
public void responseContainsLinksSubmitFeedback() {
|
||||
assertThat(lastResponse.jsonPath().getString("_links.submitFeedback.href")).isNotBlank();
|
||||
}
|
||||
|
||||
// ── Then — service isolation assertions ───────────────────────────────────
|
||||
|
||||
@Then("{string} is not in the endpoint list")
|
||||
public void isNotInEndpointList(String endpoint) {
|
||||
List<String> endpoints = lastResponse.jsonPath().getList("endpoint");
|
||||
assertThat(endpoints).as("endpoint list").doesNotContain(endpoint);
|
||||
}
|
||||
|
||||
@Then("{string} is in the endpoint list")
|
||||
public void isInEndpointList(String endpoint) {
|
||||
List<String> endpoints = lastResponse.jsonPath().getList("endpoint");
|
||||
assertThat(endpoints).as("endpoint list").contains(endpoint);
|
||||
}
|
||||
|
||||
// ── Then — telemetry assertions ───────────────────────────────────────────
|
||||
|
||||
@Then("the usage map is empty or contains only zero counts")
|
||||
public void usageMapIsEmptyOrZero() {
|
||||
Map<String, Object> usage = lastResponse.jsonPath().getMap("usage");
|
||||
if (usage != null) {
|
||||
usage.values().forEach(v ->
|
||||
assertThat(((Number) v).longValue()).as("usage count").isZero());
|
||||
}
|
||||
}
|
||||
|
||||
@Then("the usage counter {string} is at least {int}")
|
||||
public void usageCounterIsAtLeast(String eventType, int minimum) {
|
||||
Integer count = lastResponse.jsonPath().getInt("usage." + eventType);
|
||||
assertThat(count).as("usage." + eventType).isGreaterThanOrEqualTo(minimum);
|
||||
}
|
||||
|
||||
@Then("the telemetry contains tier {string}")
|
||||
public void telemetryContainsTier(String tier) {
|
||||
assertThat(lastResponse.jsonPath().getString("tier")).isEqualTo(tier);
|
||||
}
|
||||
|
||||
@Then("the telemetry contains ratePerMinute {int}")
|
||||
public void telemetryContainsRatePerMinute(int rate) {
|
||||
assertThat(lastResponse.jsonPath().getInt("ratePerMinute")).isEqualTo(rate);
|
||||
}
|
||||
|
||||
@Then("the telemetry contains maxServices {int}")
|
||||
public void telemetryContainsMaxServices(int max) {
|
||||
assertThat(lastResponse.jsonPath().getInt("maxServices")).isEqualTo(max);
|
||||
}
|
||||
|
||||
@Then("the telemetry contains maxOrgs {int}")
|
||||
public void telemetryContainsMaxOrgs(int max) {
|
||||
assertThat(lastResponse.jsonPath().getInt("maxOrgs")).isEqualTo(max);
|
||||
}
|
||||
|
||||
// ── Then — feedback assertions ────────────────────────────────────────────
|
||||
|
||||
@Then("the schema contains at least {int} dimensions")
|
||||
public void schemaContainsAtLeastDimensions(int min) {
|
||||
List<?> dims = lastResponse.jsonPath().getList("dimensions");
|
||||
assertThat(dims).as("dimensions").hasSizeGreaterThanOrEqualTo(min);
|
||||
}
|
||||
|
||||
@Then("the schema contains dimension key {string}")
|
||||
public void schemaContainsDimensionKey(String key) {
|
||||
List<String> keys = lastResponse.jsonPath().getList("dimensions.key");
|
||||
assertThat(keys).as("dimension keys").contains(key);
|
||||
}
|
||||
|
||||
@Then("the schema scale minimum is {int} and maximum is {int}")
|
||||
public void schemaScaleMinMax(int min, int max) {
|
||||
assertThat(lastResponse.jsonPath().getInt("scale.min")).isEqualTo(min);
|
||||
assertThat(lastResponse.jsonPath().getInt("scale.max")).isEqualTo(max);
|
||||
}
|
||||
|
||||
@Then("the response message is {string}")
|
||||
public void responseMessageIs(String expected) {
|
||||
assertThat(lastResponse.jsonPath().getString("message")).isEqualTo(expected);
|
||||
}
|
||||
|
||||
@Then("the response contains _links.schema")
|
||||
public void responseContainsLinksSchema() {
|
||||
assertThat(lastResponse.jsonPath().getString("_links.schema")).isNotBlank();
|
||||
}
|
||||
|
||||
@Then("the response contains _links.sandbox")
|
||||
public void responseContainsLinksSandbox() {
|
||||
assertThat(lastResponse.jsonPath().getString("_links.sandbox")).isNotBlank();
|
||||
}
|
||||
|
||||
@Then("the total submissions is {int}")
|
||||
public void totalSubmissionsIs(int expected) {
|
||||
assertThat(lastResponse.jsonPath().getInt("totalSubmissions")).isEqualTo(expected);
|
||||
}
|
||||
|
||||
@Then("the dimension {string} has average {double}")
|
||||
public void dimensionHasAverage(String key, double expected) {
|
||||
List<Map<String, Object>> scores = lastResponse.jsonPath().getList("scores");
|
||||
double actual = scores.stream()
|
||||
.filter(s -> key.equals(s.get("key")))
|
||||
.mapToDouble(s -> ((Number) s.get("average")).doubleValue())
|
||||
.findFirst()
|
||||
.orElseThrow(() -> new AssertionError("Dimension '" + key + "' not found in aggregate response"));
|
||||
assertThat(actual).as("average for " + key).isEqualTo(expected);
|
||||
}
|
||||
|
||||
@Then("the submissionsByProvider contains {string} with count {int}")
|
||||
public void submissionsByProviderContains(String provider, int count) {
|
||||
Integer actual = lastResponse.jsonPath().getInt("submissionsByProvider." + provider);
|
||||
assertThat(actual).as("submissionsByProvider." + provider).isEqualTo(count);
|
||||
}
|
||||
|
||||
// ── Then — error assertions ───────────────────────────────────────────────
|
||||
|
||||
@Then("the sandbox error contains {string}")
|
||||
public void sandboxErrorContains(String text) {
|
||||
assertThat(lastResponse.jsonPath().getString("message"))
|
||||
.as("error message").contains(text);
|
||||
}
|
||||
|
||||
// ── Helpers ───────────────────────────────────────────────────────────────
|
||||
|
||||
private void createSandbox(String name, String email) {
|
||||
Response r = given()
|
||||
.contentType(JSON)
|
||||
.body(Map.of("name", name, "contactEmail", email))
|
||||
.when()
|
||||
.post("/sandbox/register")
|
||||
.andReturn();
|
||||
|
||||
assertThat(r.statusCode()).as("sandbox creation for '%s' must return 201", name).isEqualTo(201);
|
||||
String key = r.jsonPath().getString("apiKey");
|
||||
sandboxKeys.put(name, key);
|
||||
currentSandboxKey = key;
|
||||
currentSandboxName = name;
|
||||
}
|
||||
|
||||
private String resolveKey(String sandboxName) {
|
||||
String key = sandboxKeys.get(sandboxName);
|
||||
assertThat(key).as("sandbox key for '%s' must be known — call createSandbox first", sandboxName).isNotNull();
|
||||
return key;
|
||||
}
|
||||
|
||||
private void submitFeedback(String sandboxName, Map<String, Integer> scores, String model, String provider) {
|
||||
submitFeedbackResponse(sandboxName, scores, model, provider);
|
||||
}
|
||||
|
||||
private Response submitFeedbackResponse(String sandboxName, Map<String, Integer> scores, String model, String provider) {
|
||||
Map<String, Object> body = new HashMap<>();
|
||||
body.put("scores", scores);
|
||||
if (model != null) body.put("modelIdentifier", model);
|
||||
if (provider != null) body.put("modelProvider", provider);
|
||||
|
||||
return given()
|
||||
.contentType(JSON)
|
||||
.body(body)
|
||||
.when()
|
||||
.post("/sandbox/" + sandboxName + "/feedback")
|
||||
.andReturn();
|
||||
}
|
||||
|
||||
private Map<String, Object> buildServicePayload(String name, String endpoint, String capability) {
|
||||
Map<String, Object> p = new HashMap<>();
|
||||
p.put("name", name);
|
||||
p.put("description", name + " test service");
|
||||
p.put("endpoint", endpoint);
|
||||
p.put("capabilities", List.of(capability));
|
||||
p.put("registrantEmail", "test@example.com");
|
||||
p.put("registrantName", "Test Org");
|
||||
p.put("registrantJurisdiction", "DE");
|
||||
p.put("registrantOrgType", "COMMERCIAL");
|
||||
p.put("bsmVersion", "1.0");
|
||||
return p;
|
||||
}
|
||||
}
|
||||
+5
-1
@@ -7,13 +7,17 @@ import io.restassured.RestAssured;
|
||||
import org.botstandards.apix.registry.service.ClockService;
|
||||
|
||||
import java.sql.DriverManager;
|
||||
import java.time.Instant;
|
||||
|
||||
public class TestSetup {
|
||||
|
||||
private static final Instant REFERENCE_INSTANT = Instant.parse("2025-01-01T00:00:00Z");
|
||||
|
||||
@Before(order = 0)
|
||||
public void configureRestAssured() {
|
||||
RestAssured.port = 8181;
|
||||
RestAssured.enableLoggingOfRequestAndResponseIfValidationFails();
|
||||
Arc.container().instance(ClockService.class).get().advance(REFERENCE_INSTANT);
|
||||
}
|
||||
|
||||
@Before(order = 1)
|
||||
@@ -21,7 +25,7 @@ public class TestSetup {
|
||||
try (var conn = DriverManager.getConnection(
|
||||
"jdbc:postgresql://localhost:5432/apix", "apix", "apix");
|
||||
var stmt = conn.createStatement()) {
|
||||
stmt.execute("TRUNCATE TABLE service_replacements, service_versions, services, org_verification_events, organizations CASCADE");
|
||||
stmt.execute("TRUNCATE TABLE service_replacements, service_versions, services, org_verification_events, organizations, sandbox_feedback, sandbox_usage_stats, sandboxes CASCADE");
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
+29
@@ -0,0 +1,29 @@
|
||||
package org.botstandards.apix.registry.bdd.device;
|
||||
|
||||
import io.cucumber.core.cli.Main;
|
||||
import io.quarkus.test.junit.QuarkusTest;
|
||||
import org.junit.jupiter.api.Test;
|
||||
|
||||
import static org.junit.jupiter.api.Assertions.assertEquals;
|
||||
|
||||
/**
|
||||
* Runs all device navigation BDD scenarios inside the Quarkus test context.
|
||||
*
|
||||
* Uses its own glue package so step definitions do not conflict with
|
||||
* IotTransitionSteps or OrgOnboardingSteps.
|
||||
*/
|
||||
@QuarkusTest
|
||||
public class DeviceCucumberIT {
|
||||
|
||||
@Test
|
||||
public void run() {
|
||||
byte exitCode = Main.run(
|
||||
"--glue", "org.botstandards.apix.registry.bdd.device",
|
||||
"--plugin", "pretty",
|
||||
"--plugin", "json:target/cucumber-report-devices.json",
|
||||
"--plugin", "io.qameta.allure.cucumber7jvm.AllureCucumber7Jvm",
|
||||
"classpath:features/devices"
|
||||
);
|
||||
assertEquals(0, exitCode, "One or more device Cucumber scenarios failed — check test output");
|
||||
}
|
||||
}
|
||||
+192
@@ -0,0 +1,192 @@
|
||||
package org.botstandards.apix.registry.bdd.device;
|
||||
|
||||
import io.cucumber.java.en.And;
|
||||
import io.cucumber.java.en.Given;
|
||||
import io.cucumber.java.en.Then;
|
||||
import io.cucumber.java.en.When;
|
||||
import io.restassured.response.Response;
|
||||
import io.restassured.specification.RequestSpecification;
|
||||
|
||||
import java.util.*;
|
||||
|
||||
import static io.restassured.RestAssured.given;
|
||||
import static io.restassured.http.ContentType.JSON;
|
||||
import static org.assertj.core.api.Assertions.assertThat;
|
||||
import static org.hamcrest.Matchers.*;
|
||||
|
||||
/**
|
||||
* Self-contained step definitions for the /devices top-level resource.
|
||||
* Uses its own glue package so it does not conflict with IotTransitionSteps.
|
||||
* Cucumber creates a fresh instance per scenario — instance fields are scenario-scoped.
|
||||
*/
|
||||
public class DeviceNavigationSteps {
|
||||
|
||||
private static final String API_KEY_HEADER = "X-Api-Key";
|
||||
private static final String API_KEY = "test-api-key";
|
||||
|
||||
private final Map<String, UUID> serviceIds = new HashMap<>();
|
||||
private Response lastResponse;
|
||||
|
||||
// ── Helpers ───────────────────────────────────────────────────────────────
|
||||
|
||||
private RequestSpecification asOwner() {
|
||||
return given().contentType(JSON).header(API_KEY_HEADER, API_KEY);
|
||||
}
|
||||
|
||||
private Map<String, Object> basePayload(String name) {
|
||||
Map<String, Object> p = new LinkedHashMap<>();
|
||||
p.put("name", name);
|
||||
p.put("description", name + " test service");
|
||||
p.put("endpoint", "https://" + name.toLowerCase().replace(" ", "") + ".example");
|
||||
p.put("capabilities", List.of("device.telemetry"));
|
||||
p.put("registrantEmail", "test@example.com");
|
||||
p.put("registrantName", "Test Org");
|
||||
p.put("registrantJurisdiction", "DE");
|
||||
p.put("registrantOrgType", "COMMERCIAL");
|
||||
p.put("bsmVersion", "1.0");
|
||||
return p;
|
||||
}
|
||||
|
||||
private UUID registerService(Map<String, Object> payload) {
|
||||
Response r = asOwner().body(payload).post("/services");
|
||||
r.then().statusCode(201);
|
||||
return UUID.fromString(r.jsonPath().getString("id"));
|
||||
}
|
||||
|
||||
// ── Given — service creation ──────────────────────────────────────────────
|
||||
|
||||
@Given("a production IoT service {string} with deviceClass {string} and protocol {string}")
|
||||
public void aProductionIotService(String name, String deviceClass, String protocol) {
|
||||
Map<String, Object> p = basePayload(name);
|
||||
p.put("serviceStage", "PRODUCTION");
|
||||
UUID id = registerService(p);
|
||||
serviceIds.put(name, id);
|
||||
asOwner()
|
||||
.body(Map.of("iotProfile", Map.of(
|
||||
"hubUrl", "wss://" + name.toLowerCase().replace(" ", "") + ".example/hub",
|
||||
"protocols", List.of(protocol),
|
||||
"deviceClasses", List.of(deviceClass)
|
||||
)))
|
||||
.patch("/services/" + id)
|
||||
.then().statusCode(200);
|
||||
}
|
||||
|
||||
@Given("a production service {string} with no IoT profile")
|
||||
public void aProductionServiceWithNoIotProfile(String name) {
|
||||
Map<String, Object> p = basePayload(name);
|
||||
p.put("serviceStage", "PRODUCTION");
|
||||
serviceIds.put(name, registerService(p));
|
||||
}
|
||||
|
||||
@Given("a deprecated device service {string} with locked set to false")
|
||||
public void aDeprecatedDeviceServiceLockedFalse(String name) {
|
||||
Map<String, Object> p = basePayload(name);
|
||||
p.put("serviceStage", "DEPRECATED");
|
||||
p.put("locked", false);
|
||||
p.put("sunsetAt", java.time.Instant.now().plus(java.time.Duration.ofDays(90)).toString());
|
||||
serviceIds.put(name, registerService(p));
|
||||
}
|
||||
|
||||
@Given("{string} has declared device compatibility with {string}")
|
||||
public void hasDeclaredDeviceCompatibilityWith(String provider, String deprecated) {
|
||||
asOwner()
|
||||
.body(Map.of("replacesServiceIds", List.of(serviceIds.get(deprecated).toString())))
|
||||
.patch("/services/" + serviceIds.get(provider))
|
||||
.then().statusCode(200);
|
||||
}
|
||||
|
||||
// ── When ──────────────────────────────────────────────────────────────────
|
||||
|
||||
@When("GET /devices is called with no query params")
|
||||
public void getDevicesRoot() {
|
||||
lastResponse = given().get("/devices");
|
||||
}
|
||||
|
||||
@When("GET / is called")
|
||||
public void getRoot() {
|
||||
lastResponse = given().get("/");
|
||||
}
|
||||
|
||||
@When("GET /devices?capability={string} is called")
|
||||
public void getDevicesByCapability(String capability) {
|
||||
lastResponse = given().get("/devices?capability=" + capability);
|
||||
}
|
||||
|
||||
@When("GET /devices?deviceClass={string} is called")
|
||||
public void getDevicesByDeviceClass(String deviceClass) {
|
||||
lastResponse = given().get("/devices?deviceClass=" + deviceClass);
|
||||
}
|
||||
|
||||
@When("GET /devices?protocol={string} is called")
|
||||
public void getDevicesByProtocol(String protocol) {
|
||||
lastResponse = given().get("/devices?protocol=" + protocol);
|
||||
}
|
||||
|
||||
@When("^GET /devices/\\{smartHubServiceId\\} is called$")
|
||||
public void getDeviceBySmartHubServiceId() {
|
||||
lastResponse = given().get("/devices/" + serviceIds.get("SmartHub Service"));
|
||||
}
|
||||
|
||||
@When("^GET /devices/\\{plainApiId\\} is called$")
|
||||
public void getDeviceByPlainApiId() {
|
||||
lastResponse = given().get("/devices/" + serviceIds.get("PlainApi"));
|
||||
}
|
||||
|
||||
@When("^GET /devices/\\{oldHubId\\}/replacements is called$")
|
||||
public void getDeviceReplacementsForOldHub() {
|
||||
lastResponse = given().get("/devices/" + serviceIds.get("OldHub") + "/replacements");
|
||||
}
|
||||
|
||||
// ── Then ──────────────────────────────────────────────────────────────────
|
||||
|
||||
@Then("the response is HTTP {int}")
|
||||
public void theResponseIsHttp(int status) {
|
||||
lastResponse.then().statusCode(status);
|
||||
}
|
||||
|
||||
@Then("the device root _links.self href ends with {string}")
|
||||
public void deviceRootLinksSelfHrefEndsWith(String suffix) {
|
||||
lastResponse.then().statusCode(200)
|
||||
.body("_links.self.href", endsWith(suffix));
|
||||
}
|
||||
|
||||
@Then("the device root _links.search is templated")
|
||||
public void deviceRootLinksSearchIsTemplated() {
|
||||
lastResponse.then().statusCode(200)
|
||||
.body("_links.search.templated", equalTo(true))
|
||||
.body("_links.search.href", containsString("{?"));
|
||||
}
|
||||
|
||||
@Then("the device root _links.replacement is templated")
|
||||
public void deviceRootLinksReplacementIsTemplated() {
|
||||
lastResponse.then().statusCode(200)
|
||||
.body("_links.replacement.templated", equalTo(true))
|
||||
.body("_links.replacement.href", containsString("{"));
|
||||
}
|
||||
|
||||
@Then("the root _links contains a {string} entry")
|
||||
public void rootLinksContainsEntry(String key) {
|
||||
lastResponse.then().statusCode(200)
|
||||
.body("_links." + key, notNullValue());
|
||||
}
|
||||
|
||||
@Then("{string} is in the device results")
|
||||
public void isInDeviceResults(String name) {
|
||||
lastResponse.then().statusCode(200).body("name", hasItem(name));
|
||||
}
|
||||
|
||||
@Then("{string} is not in the device results")
|
||||
public void isNotInDeviceResults(String name) {
|
||||
lastResponse.then().statusCode(200).body("name", not(hasItem(name)));
|
||||
}
|
||||
|
||||
@Then("the response body contains an iotProfile")
|
||||
public void responseBodyContainsIotProfile() {
|
||||
lastResponse.then().statusCode(200).body("iotProfile", notNullValue());
|
||||
}
|
||||
|
||||
@Then("the replacement candidates contain {string}")
|
||||
public void replacementCandidatesContain(String name) {
|
||||
lastResponse.then().statusCode(200).body("candidates.name", hasItem(name));
|
||||
}
|
||||
}
|
||||
+56
@@ -0,0 +1,56 @@
|
||||
Feature: Device registry — dedicated entry point
|
||||
As an IoT device agent
|
||||
I want a dedicated /devices entry point with device-specific navigation
|
||||
So that I can discover compatible replacement services without touching the agent service world
|
||||
|
||||
Background:
|
||||
Given a production IoT service "SmartHub Service" with deviceClass "device.class.smart-home-hub" and protocol "MQTT_5_0"
|
||||
And a production IoT service "SensorBridge" with deviceClass "device.class.industrial-sensor" and protocol "WEBSOCKET"
|
||||
And a production service "PlainApi" with no IoT profile
|
||||
|
||||
Scenario: GET /devices returns a navigation document with templated links
|
||||
When GET /devices is called with no query params
|
||||
Then the response is HTTP 200
|
||||
And the device root _links.self href ends with "/devices"
|
||||
And the device root _links.search is templated
|
||||
And the device root _links.replacement is templated
|
||||
|
||||
Scenario: Registry root exposes a devices link
|
||||
When GET / is called
|
||||
Then the response is HTTP 200
|
||||
And the root _links contains a "devices" entry
|
||||
|
||||
Scenario: Device search by capability returns only IoT-ready services
|
||||
When GET /devices?capability=device.telemetry is called
|
||||
Then the response is HTTP 200
|
||||
And "SmartHub Service" is in the device results
|
||||
And "SensorBridge" is in the device results
|
||||
And "PlainApi" is not in the device results
|
||||
|
||||
Scenario: Device search by deviceClass narrows results
|
||||
When GET /devices?deviceClass=device.class.smart-home-hub is called
|
||||
Then the response is HTTP 200
|
||||
And "SmartHub Service" is in the device results
|
||||
And "SensorBridge" is not in the device results
|
||||
|
||||
Scenario: Device search by protocol narrows results
|
||||
When GET /devices?protocol=MQTT_5_0 is called
|
||||
Then the response is HTTP 200
|
||||
And "SmartHub Service" is in the device results
|
||||
And "SensorBridge" is not in the device results
|
||||
|
||||
Scenario: GET /devices/{id} returns the device service with its IoT profile
|
||||
When GET /devices/{smartHubServiceId} is called
|
||||
Then the response is HTTP 200
|
||||
And the response body contains an iotProfile
|
||||
|
||||
Scenario: GET /devices/{id} returns 404 for a service without an IoT profile
|
||||
When GET /devices/{plainApiId} is called
|
||||
Then the response is HTTP 404
|
||||
|
||||
Scenario: Device replacement discovery at /devices/{id}/replacements
|
||||
Given a deprecated device service "OldHub" with locked set to false
|
||||
And "SmartHub Service" has declared device compatibility with "OldHub"
|
||||
When GET /devices/{oldHubId}/replacements is called
|
||||
Then the response is HTTP 200
|
||||
And the replacement candidates contain "SmartHub Service"
|
||||
+2
-1
@@ -82,6 +82,7 @@ Feature: Organisation audit log
|
||||
|
||||
Scenario: Audit log returns events newest first
|
||||
Given an organisation is registered with target level "UNVERIFIED" for domain "example.com"
|
||||
When time advances by 2 minutes
|
||||
And the owner has initiated key rotation using the rotation secret
|
||||
When the owner requests the audit log
|
||||
And the owner requests the audit log
|
||||
Then the first audit event is "TAN_ISSUED"
|
||||
|
||||
+40
-8
@@ -12,10 +12,17 @@ Feature: BSF admin actions — temp grants, revocation, TAN-based key rotation
|
||||
And the effective O-level is "OPERATIONALLY_VERIFIED"
|
||||
And the earned O-level is "IDENTITY_VERIFIED"
|
||||
|
||||
Scenario: Effective O-level drops back to earned level after temp grant expires
|
||||
Scenario: Effective O-level still active 1 nanosecond before the temp grant expires
|
||||
Given an organisation has earned O-level "IDENTITY_VERIFIED" with target "IDENTITY_VERIFIED"
|
||||
And a BSF admin grants a temporary level "OPERATIONALLY_VERIFIED" expiring in 2 hours
|
||||
When time advances by 3 hours
|
||||
When time advances by 1 nanosecond short of 2 hours
|
||||
And the caller reads the organisation
|
||||
Then the effective O-level is "OPERATIONALLY_VERIFIED"
|
||||
|
||||
Scenario: Effective O-level drops to earned level at exactly 2 hours
|
||||
Given an organisation has earned O-level "IDENTITY_VERIFIED" with target "IDENTITY_VERIFIED"
|
||||
And a BSF admin grants a temporary level "OPERATIONALLY_VERIFIED" expiring in 2 hours
|
||||
When time advances by 2 hours
|
||||
And the caller reads the organisation
|
||||
Then the effective O-level is "IDENTITY_VERIFIED"
|
||||
|
||||
@@ -69,10 +76,19 @@ Feature: BSF admin actions — temp grants, revocation, TAN-based key rotation
|
||||
When the owner initiates key rotation using an invalid rotation secret
|
||||
Then the response status is 403
|
||||
|
||||
Scenario: Key rotation TAN expires before confirmation — rotation is rejected
|
||||
Scenario: Key rotation TAN still valid 1 nanosecond before the 5-minute expiry
|
||||
Given an organisation is registered with target level "UNVERIFIED" for domain "example.com"
|
||||
And the owner has initiated key rotation using the rotation secret
|
||||
When time advances by 6 minutes
|
||||
When time advances by 1 nanosecond short of 5 minutes
|
||||
And the owner confirms key rotation with the TAN
|
||||
Then the response status is 200
|
||||
And a new api key with prefix "apix_org_" is returned
|
||||
And a new rotation secret with prefix "apix_rot_" is returned
|
||||
|
||||
Scenario: Key rotation TAN expires at exactly 5 minutes
|
||||
Given an organisation is registered with target level "UNVERIFIED" for domain "example.com"
|
||||
And the owner has initiated key rotation using the rotation secret
|
||||
When time advances by 5 minutes
|
||||
And the owner confirms key rotation with the TAN
|
||||
Then the response status is 422
|
||||
|
||||
@@ -132,10 +148,19 @@ Feature: BSF admin actions — temp grants, revocation, TAN-based key rotation
|
||||
Then the response status is 200
|
||||
And the response message confirms TAN was sent
|
||||
|
||||
Scenario: TAN is rejected after its 5-minute validity window
|
||||
Scenario: Emergency TAN still valid 1 nanosecond before the 5-minute expiry
|
||||
Given an organisation is registered with target level "UNVERIFIED" for domain "example.com"
|
||||
And the owner has requested a TAN using the registered email
|
||||
When time advances by 6 minutes
|
||||
When time advances by 1 nanosecond short of 5 minutes
|
||||
And the owner uses the TAN to rotate keys
|
||||
Then the response status is 200
|
||||
And a new api key with prefix "apix_org_" is returned
|
||||
And a new rotation secret with prefix "apix_rot_" is returned
|
||||
|
||||
Scenario: Emergency TAN expires at exactly 5 minutes
|
||||
Given an organisation is registered with target level "UNVERIFIED" for domain "example.com"
|
||||
And the owner has requested a TAN using the registered email
|
||||
When time advances by 5 minutes
|
||||
And the owner uses the TAN to rotate keys
|
||||
Then the response status is 422
|
||||
|
||||
@@ -145,9 +170,16 @@ Feature: BSF admin actions — temp grants, revocation, TAN-based key rotation
|
||||
When the owner requests a TAN using the registered email
|
||||
Then the response status is 422
|
||||
|
||||
Scenario: TAN request counter resets after 24 hours
|
||||
Scenario: TAN request counter is still active at exactly 24 hours
|
||||
Given an organisation is registered with target level "UNVERIFIED" for domain "example.com"
|
||||
And the owner has requested a TAN 3 times within the last 24 hours
|
||||
When time advances by 25 hours
|
||||
When time advances by 24 hours
|
||||
And the owner requests a TAN using the registered email
|
||||
Then the response status is 422
|
||||
|
||||
Scenario: TAN request counter resets at 24 hours and 1 nanosecond
|
||||
Given an organisation is registered with target level "UNVERIFIED" for domain "example.com"
|
||||
And the owner has requested a TAN 3 times within the last 24 hours
|
||||
When time advances by 24 hours and 1 nanosecond
|
||||
And the owner requests a TAN using the registered email
|
||||
Then the response status is 200
|
||||
|
||||
+12
-2
@@ -42,11 +42,21 @@ Feature: DNS-challenge key rotation — bot-friendly ACME DNS-01 pattern
|
||||
When the agent confirms DNS-challenge key rotation
|
||||
Then the response status is 422
|
||||
|
||||
Scenario: Confirm fails when the 15-minute challenge window has expired
|
||||
Scenario: DNS challenge window still active 1 nanosecond before the 15-minute expiry
|
||||
Given an organisation is registered with target level "UNVERIFIED" for domain "example.com"
|
||||
And the agent has initiated DNS-challenge key rotation
|
||||
And the agent has published the rotation challenge to dns
|
||||
When time advances by 16 minutes
|
||||
When time advances by 1 nanosecond short of 15 minutes
|
||||
And the agent confirms DNS-challenge key rotation
|
||||
Then the response status is 200
|
||||
And a new api key with prefix "apix_org_" is returned
|
||||
And a new rotation secret with prefix "apix_rot_" is returned
|
||||
|
||||
Scenario: Confirm fails at exactly the 15-minute challenge window expiry
|
||||
Given an organisation is registered with target level "UNVERIFIED" for domain "example.com"
|
||||
And the agent has initiated DNS-challenge key rotation
|
||||
And the agent has published the rotation challenge to dns
|
||||
When time advances by 15 minutes
|
||||
And the agent confirms DNS-challenge key rotation
|
||||
Then the response status is 422
|
||||
|
||||
|
||||
+58
@@ -0,0 +1,58 @@
|
||||
Feature: Agent experience feedback
|
||||
|
||||
Background:
|
||||
Given a sandbox named "feedback-demo" exists
|
||||
|
||||
Scenario: Feedback schema is globally discoverable without authentication
|
||||
When GET /sandbox/feedback-schema is called without authentication
|
||||
Then the response code is 200
|
||||
And the schema contains at least 9 dimensions
|
||||
And the schema contains dimension key "hateoas_navigation"
|
||||
And the schema contains dimension key "liveness_signal_accuracy"
|
||||
And the schema contains dimension key "error_message_quality"
|
||||
And the schema contains dimension key "extension_property_coverage"
|
||||
And the schema scale minimum is 0 and maximum is 10
|
||||
|
||||
Scenario: Submit valid feedback returns 202
|
||||
When feedback is submitted to "feedback-demo" with scores hateoas_navigation=8 discovery_accuracy=7
|
||||
Then the response code is 202
|
||||
And the response message is "Feedback recorded. Thank you."
|
||||
And the response contains _links.schema
|
||||
And the response contains _links.sandbox
|
||||
|
||||
Scenario: Submit feedback with model identity
|
||||
When feedback is submitted to "feedback-demo" with scores hateoas_navigation=9 and model "claude-sonnet-4-6" provider "anthropic"
|
||||
Then the response code is 202
|
||||
|
||||
Scenario: Submit feedback with unknown dimension keys is accepted but ignored
|
||||
When feedback is submitted to "feedback-demo" with scores unknown_key=5
|
||||
Then the response code is 422
|
||||
|
||||
Scenario: Submit feedback with score out of range returns 422
|
||||
When feedback is submitted to "feedback-demo" with scores hateoas_navigation=11
|
||||
Then the response code is 422
|
||||
|
||||
Scenario: Submit feedback with empty scores returns 400
|
||||
When feedback is submitted to "feedback-demo" with empty scores
|
||||
Then the response code is 400
|
||||
|
||||
Scenario: Aggregate feedback requires sandbox API key
|
||||
When the feedback aggregate for "feedback-demo" is requested without an API key
|
||||
Then the response code is 401
|
||||
|
||||
Scenario: Aggregate feedback shows averages per dimension
|
||||
Given feedback has been submitted to "feedback-demo" with scores hateoas_navigation=6 discovery_accuracy=8
|
||||
And feedback has been submitted to "feedback-demo" with scores hateoas_navigation=10 discovery_accuracy=4
|
||||
When the feedback aggregate for "feedback-demo" is requested with the sandbox API key
|
||||
Then the response code is 200
|
||||
And the total submissions is 2
|
||||
And the dimension "hateoas_navigation" has average 8.0
|
||||
And the dimension "discovery_accuracy" has average 6.0
|
||||
|
||||
Scenario: Aggregate includes provider breakdown
|
||||
Given feedback has been submitted to "feedback-demo" with scores hateoas_navigation=7 and model "claude-sonnet-4-6" provider "anthropic"
|
||||
And feedback has been submitted to "feedback-demo" with scores hateoas_navigation=5 and model "gpt-4o" provider "openai"
|
||||
When the feedback aggregate for "feedback-demo" is requested with the sandbox API key
|
||||
Then the response code is 200
|
||||
And the submissionsByProvider contains "anthropic" with count 1
|
||||
And the submissionsByProvider contains "openai" with count 1
|
||||
+33
@@ -0,0 +1,33 @@
|
||||
Feature: HATEOAS navigation and root key resolution
|
||||
|
||||
Background:
|
||||
Given a sandbox named "nav-demo" exists
|
||||
|
||||
Scenario: Root resource without API key omits _links.sandbox
|
||||
When the root resource is requested without an API key
|
||||
Then the response code is 200
|
||||
And the response contains _links.registerSandbox
|
||||
And the response contains _links.feedbackSchema
|
||||
And the response does not contain _links.sandbox
|
||||
|
||||
Scenario: Root resource with valid sandbox API key includes _links.sandbox
|
||||
When the root resource is requested with the sandbox API key for "nav-demo"
|
||||
Then the response code is 200
|
||||
And the response contains _links.sandbox ending with "/sandbox/nav-demo"
|
||||
|
||||
Scenario: Root resource with unknown API key omits _links.sandbox
|
||||
When the root resource is requested with API key "apix_sb_unknownkey"
|
||||
Then the response code is 200
|
||||
And the response does not contain _links.sandbox
|
||||
|
||||
Scenario: Sandbox root endpoint returns sandbox metadata
|
||||
When the sandbox root for "nav-demo" is requested
|
||||
Then the response code is 200
|
||||
And the response contains sandbox name "nav-demo"
|
||||
And the response contains _links.services
|
||||
And the response contains _links.submitFeedback
|
||||
And the response contains _links.feedbackSchema
|
||||
|
||||
Scenario: Sandbox root for unknown name returns 404
|
||||
When the sandbox root for "does-not-exist" is requested
|
||||
Then the response code is 404
|
||||
+32
@@ -0,0 +1,32 @@
|
||||
Feature: Sandbox registration
|
||||
|
||||
Scenario: Create a sandbox and receive an API key exactly once
|
||||
When an agent registers a sandbox named "peter-demo" with email "peter@openclaw.io"
|
||||
Then the response code is 201
|
||||
And the response contains a sandbox id
|
||||
And the response contains an API key with prefix "apix_sb_"
|
||||
And the response contains tier "FREE"
|
||||
And the response contains a non-null expiresAt
|
||||
And the response contains _links.self ending with "/sandbox/peter-demo"
|
||||
And the response contains _links.services
|
||||
|
||||
Scenario: Registration fails when name contains uppercase letters
|
||||
When an agent registers a sandbox named "Peter-Demo" with email "peter@openclaw.io"
|
||||
Then the response code is 400
|
||||
|
||||
Scenario: Registration fails when name is too short
|
||||
When an agent registers a sandbox named "ab" with email "peter@openclaw.io"
|
||||
Then the response code is 400
|
||||
|
||||
Scenario: Registration fails when name starts with a hyphen
|
||||
When an agent registers a sandbox named "-demo" with email "peter@openclaw.io"
|
||||
Then the response code is 400
|
||||
|
||||
Scenario: Registration fails when email is invalid
|
||||
When an agent registers a sandbox named "valid-name" with email "not-an-email"
|
||||
Then the response code is 400
|
||||
|
||||
Scenario: Duplicate name is rejected with 409
|
||||
Given a sandbox named "duplicate-test" exists
|
||||
When an agent registers a sandbox named "duplicate-test" with email "other@example.com"
|
||||
Then the response code is 409
|
||||
+45
@@ -0,0 +1,45 @@
|
||||
Feature: Sandbox service isolation from production
|
||||
|
||||
Background:
|
||||
Given a sandbox named "isolation-test" exists
|
||||
And a production service "ProdService" with endpoint "https://prod.example.com" is registered
|
||||
|
||||
Scenario: Service registered in sandbox does not appear in production list
|
||||
Given a sandbox service with endpoint "https://sandbox.example.com" and capability "test.cap" is registered in "isolation-test"
|
||||
When GET /services is called without authentication
|
||||
Then the response code is 200
|
||||
And "https://sandbox.example.com" is not in the endpoint list
|
||||
|
||||
Scenario: Production service does not appear in sandbox service list
|
||||
Given a sandbox service with endpoint "https://sandbox.example.com" and capability "test.cap" is registered in "isolation-test"
|
||||
When the sandbox service list for "isolation-test" is requested
|
||||
Then the response code is 200
|
||||
And "https://prod.example.com" is not in the endpoint list
|
||||
|
||||
Scenario: Service registration in sandbox requires the sandbox API key
|
||||
When a service is registered in sandbox "isolation-test" without an API key
|
||||
Then the response code is 401
|
||||
|
||||
Scenario: Service registration in sandbox with wrong key returns 401
|
||||
When a service is registered in sandbox "isolation-test" with API key "apix_sb_wrongkey"
|
||||
Then the response code is 401
|
||||
|
||||
Scenario: Sandbox search is isolated from production results
|
||||
Given a sandbox service with endpoint "https://sb-search.example.com" and capability "search.cap" is registered in "isolation-test"
|
||||
When sandbox "isolation-test" services are searched by capability "search.cap"
|
||||
Then the response code is 200
|
||||
And "https://sb-search.example.com" is in the endpoint list
|
||||
|
||||
Scenario: Sandbox search does not return production services
|
||||
Given a production service "ProdSearchService" with endpoint "https://prod-search.example.com" is registered
|
||||
When sandbox "isolation-test" services are searched by capability "device.telemetry"
|
||||
Then the response code is 200
|
||||
And "https://prod-search.example.com" is not in the endpoint list
|
||||
|
||||
Scenario: Sandbox services can be registered with extension properties and queried by them
|
||||
Given a sandbox service with endpoint "https://ext-eu.example.com" capability "data.processing" and extension "region:eu" is registered in "isolation-test"
|
||||
And a sandbox service with endpoint "https://ext-us.example.com" capability "data.processing" and extension "region:us" is registered in "isolation-test"
|
||||
When sandbox "isolation-test" services are searched by capability "data.processing" and property "region:eu"
|
||||
Then the response code is 200
|
||||
And "https://ext-eu.example.com" is in the endpoint list
|
||||
And "https://ext-us.example.com" is not in the endpoint list
|
||||
+43
@@ -0,0 +1,43 @@
|
||||
Feature: Sandbox telemetry
|
||||
|
||||
Background:
|
||||
Given a sandbox named "telemetry-demo" exists
|
||||
|
||||
Scenario: Telemetry starts empty before any activity
|
||||
When the telemetry for "telemetry-demo" is requested with the sandbox API key
|
||||
Then the response code is 200
|
||||
And the usage map is empty or contains only zero counts
|
||||
|
||||
Scenario: Viewing the sandbox root increments the SANDBOX_VIEWED counter
|
||||
Given the sandbox root for "telemetry-demo" has been viewed once
|
||||
When the telemetry for "telemetry-demo" is requested with the sandbox API key
|
||||
Then the response code is 200
|
||||
And the usage counter "SANDBOX_VIEWED" is at least 1
|
||||
|
||||
Scenario: Registering a service increments the SERVICE_REGISTERED counter
|
||||
Given a sandbox service with endpoint "https://tel.example.com" and capability "tel.cap" is registered in "telemetry-demo"
|
||||
When the telemetry for "telemetry-demo" is requested with the sandbox API key
|
||||
Then the response code is 200
|
||||
And the usage counter "SERVICE_REGISTERED" is at least 1
|
||||
|
||||
Scenario: Listing services increments the SERVICE_LISTED counter
|
||||
Given the sandbox service list for "telemetry-demo" has been requested once
|
||||
When the telemetry for "telemetry-demo" is requested with the sandbox API key
|
||||
Then the response code is 200
|
||||
And the usage counter "SERVICE_LISTED" is at least 1
|
||||
|
||||
Scenario: Telemetry requires the sandbox API key
|
||||
When the telemetry for "telemetry-demo" is requested without an API key
|
||||
Then the response code is 401
|
||||
|
||||
Scenario: Telemetry with wrong key returns 401
|
||||
When the telemetry for "telemetry-demo" is requested with API key "apix_sb_wrongkey"
|
||||
Then the response code is 401
|
||||
|
||||
Scenario: Telemetry response includes tier metadata
|
||||
When the telemetry for "telemetry-demo" is requested with the sandbox API key
|
||||
Then the response code is 200
|
||||
And the telemetry contains tier "FREE"
|
||||
And the telemetry contains ratePerMinute 60
|
||||
And the telemetry contains maxServices 10
|
||||
And the telemetry contains maxOrgs 3
|
||||
+8
@@ -0,0 +1,8 @@
|
||||
Feature: Sandbox tier caps
|
||||
|
||||
Scenario: FREE sandbox allows up to 10 services
|
||||
Given a sandbox named "cap-test" exists
|
||||
And 10 services have been registered in sandbox "cap-test"
|
||||
When a service is registered in sandbox "cap-test" with the sandbox API key
|
||||
Then the response code is 429
|
||||
And the sandbox error contains "Service limit reached"
|
||||
@@ -0,0 +1,52 @@
|
||||
package org.botstandards.apix.registry;
|
||||
|
||||
import org.eclipse.microprofile.openapi.annotations.OpenAPIDefinition;
|
||||
import org.eclipse.microprofile.openapi.annotations.info.Contact;
|
||||
import org.eclipse.microprofile.openapi.annotations.info.Info;
|
||||
import org.eclipse.microprofile.openapi.annotations.security.SecurityRequirement;
|
||||
import org.eclipse.microprofile.openapi.annotations.security.SecurityScheme;
|
||||
import org.eclipse.microprofile.openapi.annotations.security.SecuritySchemes;
|
||||
import org.eclipse.microprofile.openapi.annotations.enums.SecuritySchemeIn;
|
||||
import org.eclipse.microprofile.openapi.annotations.enums.SecuritySchemeType;
|
||||
import jakarta.ws.rs.core.Application;
|
||||
|
||||
@OpenAPIDefinition(
|
||||
info = @Info(
|
||||
title = "APIX Registry API",
|
||||
version = "0.1",
|
||||
description = """
|
||||
The open autonomous agent service discovery registry.
|
||||
|
||||
## Agent Workflow
|
||||
|
||||
1. `GET /` — Read the HATEOAS root to discover all entry points and this OpenAPI spec URL.
|
||||
2. `GET /services?capability=<term>` — Search for PRODUCTION services by capability keyword \
|
||||
(e.g. nlp, translation, speech-to-text, image-classification).
|
||||
3. Follow `openApiSpecUrl` or `mcpSpecUrl` in the returned service record to learn how to call it.
|
||||
|
||||
## Registering a Service
|
||||
|
||||
POST /services with a BSM payload and an `X-Api-Key` header. \
|
||||
The endpoint URL is the unique key — re-posting the same endpoint updates the existing record (UPSERT).
|
||||
|
||||
## Verification Levels (O-levels)
|
||||
|
||||
Services start at UNVERIFIED. The registry progressively verifies registrant identity (O1 DNS), \
|
||||
legal entity (O2 GLEIF/LEI), and technical hygiene (O3). Higher O-levels indicate greater \
|
||||
trustworthiness. Filter replacement candidates by minimum O-level via \
|
||||
`GET /services/{id}/replacements?minOLevel=LEGAL_ENTITY_VERIFIED`.
|
||||
""",
|
||||
contact = @Contact(url = "https://api-index.org")
|
||||
),
|
||||
security = @SecurityRequirement(name = "ApiKey")
|
||||
)
|
||||
@SecuritySchemes(
|
||||
@SecurityScheme(
|
||||
securitySchemeName = "ApiKey",
|
||||
type = SecuritySchemeType.APIKEY,
|
||||
apiKeyName = "X-Api-Key",
|
||||
in = SecuritySchemeIn.HEADER,
|
||||
description = "Registry write key. Required for POST /services, PATCH /services/{id}, and all other write operations. Read operations (GET) are unauthenticated. Contact the registry operator at https://api-index.org to obtain a key."
|
||||
)
|
||||
)
|
||||
public class RegistryApiConfig extends Application {}
|
||||
+17
@@ -0,0 +1,17 @@
|
||||
package org.botstandards.apix.registry.dto;
|
||||
|
||||
import com.fasterxml.jackson.annotation.JsonProperty;
|
||||
|
||||
public record DeviceIndexResponse(
|
||||
@JsonProperty("_links") DeviceLinks links
|
||||
) {
|
||||
public record DeviceLinks(
|
||||
LinkRef self,
|
||||
LinkRef search,
|
||||
LinkRef replacement
|
||||
) {
|
||||
public record LinkRef(String href, boolean templated) {
|
||||
public LinkRef(String href) { this(href, false); }
|
||||
}
|
||||
}
|
||||
}
|
||||
+21
@@ -0,0 +1,21 @@
|
||||
package org.botstandards.apix.registry.dto;
|
||||
|
||||
import com.fasterxml.jackson.annotation.JsonProperty;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
public record FeedbackAggregateResponse(
|
||||
String sandboxId,
|
||||
String name,
|
||||
int totalSubmissions,
|
||||
List<DimensionScore> scores,
|
||||
Map<String, Integer> submissionsByProvider,
|
||||
@JsonProperty("_links") SandboxLinks links
|
||||
) {
|
||||
public record DimensionScore(
|
||||
String key,
|
||||
String question,
|
||||
double average,
|
||||
int votes
|
||||
) {}
|
||||
}
|
||||
@@ -0,0 +1,9 @@
|
||||
package org.botstandards.apix.registry.dto;
|
||||
|
||||
/** A single rated dimension in the agent experience feedback schema. */
|
||||
public record FeedbackDimension(
|
||||
String key,
|
||||
String question,
|
||||
String minLabel,
|
||||
String maxLabel
|
||||
) {}
|
||||
+15
@@ -0,0 +1,15 @@
|
||||
package org.botstandards.apix.registry.dto;
|
||||
|
||||
import com.fasterxml.jackson.annotation.JsonProperty;
|
||||
import java.util.List;
|
||||
|
||||
public record FeedbackSchemaResponse(
|
||||
String description,
|
||||
Scale scale,
|
||||
List<FeedbackDimension> dimensions,
|
||||
@JsonProperty("_links") FeedbackSchemaLinks links
|
||||
) {
|
||||
public record Scale(int min, int max, String minLabel, String maxLabel) {}
|
||||
|
||||
public record FeedbackSchemaLinks(SandboxLinks.LinkRef self) {}
|
||||
}
|
||||
+25
@@ -0,0 +1,25 @@
|
||||
package org.botstandards.apix.registry.dto;
|
||||
|
||||
import jakarta.validation.constraints.NotEmpty;
|
||||
import jakarta.validation.constraints.Size;
|
||||
import java.util.Map;
|
||||
|
||||
public record FeedbackSubmissionRequest(
|
||||
|
||||
@NotEmpty
|
||||
Map<String, Integer> scores,
|
||||
|
||||
@Size(max = 255)
|
||||
String agentIdentifier,
|
||||
|
||||
@Size(max = 500)
|
||||
String comment,
|
||||
|
||||
/** Full model identifier as the agent knows it, e.g. "claude-sonnet-4-6", "gpt-4o-2024-11-20". */
|
||||
@Size(max = 255)
|
||||
String modelIdentifier,
|
||||
|
||||
/** Provider family: "anthropic", "openai", "google", "meta", "mistral" … */
|
||||
@Size(max = 100)
|
||||
String modelProvider
|
||||
) {}
|
||||
@@ -0,0 +1,44 @@
|
||||
package org.botstandards.apix.registry.dto;
|
||||
|
||||
import com.fasterxml.jackson.annotation.JsonInclude;
|
||||
import com.fasterxml.jackson.annotation.JsonProperty;
|
||||
|
||||
public record IndexResponse(
|
||||
String apixVersion,
|
||||
String name,
|
||||
String description,
|
||||
RegistryStats stats,
|
||||
@JsonProperty("_links") RegistryLinks links
|
||||
) {
|
||||
public record RegistryStats(
|
||||
long registeredServices,
|
||||
long liveServices
|
||||
) {}
|
||||
|
||||
public record RegistryLinks(
|
||||
LinkRef self,
|
||||
LinkRef services,
|
||||
LinkRef servicesSearch,
|
||||
LinkRef devices,
|
||||
LinkRef organizations,
|
||||
LinkRef health,
|
||||
LinkRef openapi,
|
||||
LinkRef registerSandbox,
|
||||
LinkRef feedbackSchema,
|
||||
@JsonInclude(JsonInclude.Include.NON_NULL)
|
||||
LinkRef sandbox
|
||||
) {
|
||||
/** Constructor for anonymous / non-sandbox-keyed requests. */
|
||||
public RegistryLinks(LinkRef self, LinkRef services, LinkRef servicesSearch,
|
||||
LinkRef devices, LinkRef organizations,
|
||||
LinkRef health, LinkRef openapi, LinkRef registerSandbox,
|
||||
LinkRef feedbackSchema) {
|
||||
this(self, services, servicesSearch, devices, organizations,
|
||||
health, openapi, registerSandbox, feedbackSchema, null);
|
||||
}
|
||||
|
||||
public record LinkRef(String href, boolean templated) {
|
||||
public LinkRef(String href) { this(href, false); }
|
||||
}
|
||||
}
|
||||
}
|
||||
+15
@@ -5,13 +5,28 @@ import jakarta.validation.constraints.Email;
|
||||
import jakarta.validation.constraints.NotBlank;
|
||||
import jakarta.validation.constraints.NotNull;
|
||||
import org.botstandards.apix.common.OLevel;
|
||||
import org.eclipse.microprofile.openapi.annotations.media.Schema;
|
||||
|
||||
@Schema(description = "Request body for registering a new organisation. An organisation record establishes the legal identity of a registrant and is the starting point for O-level verification. Services are registered independently via POST /services using the registrant fields in the BSM payload — no prior organisation registration is required to submit a service.")
|
||||
@JsonIgnoreProperties(ignoreUnknown = true)
|
||||
public record OrgRegistrationRequest(
|
||||
|
||||
@Schema(description = "Full legal name of the organisation or individual.", example = "Acme GmbH")
|
||||
@NotBlank String registrantName,
|
||||
|
||||
@Schema(description = "Contact email. Used for O-level verification notifications and key rotation challenges.", example = "ops@acme.example")
|
||||
@NotBlank @Email String registrantEmail,
|
||||
|
||||
@Schema(description = "ISO 3166-1 alpha-2 country code of the legal jurisdiction.", example = "DE")
|
||||
@NotBlank String registrantJurisdiction,
|
||||
|
||||
@Schema(description = "Legal form: INDIVIDUAL, COMMERCIAL, NON_PROFIT, GOVERNMENT, ACADEMIC.")
|
||||
@NotBlank String registrantOrgType,
|
||||
|
||||
@Schema(description = "Primary internet domain controlled by this organisation (e.g. acme.example). The registry places a DNS TXT record challenge on this domain for O1 identity verification.", example = "acme.example")
|
||||
@NotBlank String domain,
|
||||
|
||||
@Schema(description = "Desired verification level. The registry initiates verification automatically up to this level. Use IDENTITY_VERIFIED (O1) to start; upgrade later via PATCH /organizations/{id}/request-upgrade.")
|
||||
@NotNull OLevel targetOLevel
|
||||
|
||||
) {}
|
||||
|
||||
+13
@@ -0,0 +1,13 @@
|
||||
package org.botstandards.apix.registry.dto;
|
||||
|
||||
import com.fasterxml.jackson.annotation.JsonProperty;
|
||||
import java.time.Instant;
|
||||
|
||||
public record SandboxIndexResponse(
|
||||
String sandboxId,
|
||||
String name,
|
||||
String tier,
|
||||
int ratePerMinute,
|
||||
Instant expiresAt,
|
||||
@JsonProperty("_links") SandboxLinks links
|
||||
) {}
|
||||
@@ -0,0 +1,20 @@
|
||||
package org.botstandards.apix.registry.dto;

import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty;

/** Shared HATEOAS link structure for sandbox responses. */
public record SandboxLinks(
        LinkRef self,
        LinkRef services,
        // Explicit wire name; pins the JSON key even if the component is renamed.
        @JsonProperty("servicesSearch") LinkRef servicesSearch,
        LinkRef submitFeedback,
        LinkRef feedbackSchema,
        /** Dashboard URL on the portal. Null if portal URL is not configured. */
        @JsonInclude(JsonInclude.Include.NON_NULL)
        LinkRef dashboard
) {
    /**
     * One hyperlink. {@code templated} marks hrefs containing URI-template
     * placeholders (e.g. {@code {?capability}}) that the agent must expand.
     */
    public record LinkRef(String href, boolean templated) {
        /** Convenience constructor for plain, non-templated links. */
        public LinkRef(String href) { this(href, false); }
    }
}
|
||||
+27
@@ -0,0 +1,27 @@
|
||||
package org.botstandards.apix.registry.dto;

import jakarta.validation.constraints.Email;
import jakarta.validation.constraints.NotBlank;
import jakarta.validation.constraints.Pattern;
import jakarta.validation.constraints.Size;

/**
 * Request body for sandbox registration (POST /sandbox/register).
 * Only {@code name} is mandatory; email and location are optional extras.
 */
public record SandboxRegistrationRequest(

        // Human-readable label (not required to be unique); slug format enforced below.
        @NotBlank
        @Size(min = 3, max = 100)
        @Pattern(regexp = "^[a-z0-9][a-z0-9-]*[a-z0-9]$",
                message = "name must be lowercase alphanumeric with hyphens, no leading or trailing hyphen")
        String name,

        // Optional contact address; @Email/@Size only validate when a value is present.
        @Email
        @Size(max = 255)
        String contactEmail,

        /**
         * Optional: owner-declared location shown on the sandbox map (e.g. "Berlin, Germany").
         * Raw registration IPs are never stored. If provided, geocoded once at registration time.
         * Omit or set null to register anonymously — sandbox functions identically either way.
         */
        @Size(max = 200)
        String location
) {}
|
||||
+21
@@ -0,0 +1,21 @@
|
||||
package org.botstandards.apix.registry.dto;

import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty;

import java.time.Instant;

/**
 * Response to a successful sandbox registration. This is the only payload
 * that ever carries the plaintext keys — both are shown exactly once and
 * are not retrievable afterwards.
 */
public record SandboxRegistrationResponse(
        String sandboxId,
        String name,
        /** Plaintext API key — shown exactly once. Embed in agents as X-Api-Key. */
        String apiKey,
        /** Plaintext maintenance key — shown exactly once. Keep private; used only for lifecycle operations. */
        String maintenanceKey,
        String tier,
        int ratePerMinute,
        Instant expiresAt,
        /** Dashboard URL on the portal — bookmark this to monitor your sandbox. */
        // NON_NULL: omitted from JSON when the portal base URL is not configured.
        @JsonInclude(JsonInclude.Include.NON_NULL)
        String dashboardUrl,
        @JsonProperty("_links") SandboxLinks links
) {}
|
||||
+24
@@ -0,0 +1,24 @@
|
||||
package org.botstandards.apix.registry.dto;

import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty;

import java.time.Instant;
import java.util.Map;

/** Usage statistics and tier metadata for one sandbox (telemetry endpoint). */
public record SandboxTelemetryResponse(
        String sandboxId,
        String name,
        String tier,
        int ratePerMinute,
        Instant expiresAt,
        /** NULL = unlimited (COMMUNITY / FOUNDER). */
        // Include.ALWAYS forces the null into the JSON so clients can see "unlimited"
        // explicitly rather than a silently missing field.
        @JsonInclude(JsonInclude.Include.ALWAYS)
        Integer maxServices,
        // Same convention: null means unlimited and must be serialised.
        @JsonInclude(JsonInclude.Include.ALWAYS)
        Integer maxOrgs,
        /** Cumulative counts keyed by event type since sandbox creation. */
        Map<String, Long> usage,
        /** Timestamp of the most recent tracked request across all event types. */
        Instant lastActivityAt,
        @JsonProperty("_links") SandboxLinks links
) {}
|
||||
@@ -7,6 +7,7 @@ import org.botstandards.apix.registry.dto.IotProfileResponse;
|
||||
|
||||
import java.time.Instant;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.UUID;
|
||||
|
||||
@JsonInclude(JsonInclude.Include.NON_NULL)
|
||||
@@ -37,7 +38,9 @@ public record ServiceResponse(
|
||||
List<UUID> replacesServiceIds,
|
||||
Instant registeredAt,
|
||||
Instant lastUpdatedAt,
|
||||
IotProfileResponse iotProfile
|
||||
IotProfileResponse iotProfile,
|
||||
@JsonInclude(JsonInclude.Include.NON_EMPTY)
|
||||
Map<String, Object> extensions
|
||||
) {
|
||||
public static ServiceResponse from(ServiceEntity e) {
|
||||
BsmPayload b = e.bsmPayload;
|
||||
@@ -68,7 +71,8 @@ public record ServiceResponse(
|
||||
b.replacesServiceIds(),
|
||||
e.registeredAt,
|
||||
e.lastUpdatedAt,
|
||||
IotProfileResponse.from(e.iotProfile)
|
||||
IotProfileResponse.from(e.iotProfile),
|
||||
b.extensions()
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -0,0 +1,59 @@
|
||||
package org.botstandards.apix.registry.entity;

import jakarta.persistence.*;

import java.time.Instant;
import java.util.UUID;

/**
 * JPA entity backing the "sandboxes" table. Stores only hashed credentials —
 * plaintext keys exist solely in the registration response. Public-field
 * access (no getters/setters) is the style used for entities in this module.
 */
@Entity
@Table(name = "sandboxes")
public class SandboxEntity {

    /** Permanent sandbox identifier; used in all /sandbox/{uuid} paths. */
    @Id
    @Column(columnDefinition = "uuid")
    public UUID id;

    /** Human-readable label; not required to be unique (the UUID is the identifier). */
    @Column(nullable = false)
    public String name;

    /** Optional contact address supplied at registration; null when omitted. */
    @Column(name = "contact_email")
    public String contactEmail;

    /** Hash of the sandbox API key; unique so one key maps to at most one sandbox. */
    @Column(name = "api_key_hash", nullable = false, unique = true)
    public String apiKeyHash;

    /** Tier name — FREE, COMMUNITY and FOUNDER appear elsewhere in this module; confirm full set in SandboxService. */
    @Column(nullable = false)
    public String tier;

    /** Request-rate cap applied to this sandbox. */
    @Column(name = "rate_per_minute", nullable = false)
    public int ratePerMinute;

    /** NULL = unlimited (COMMUNITY / FOUNDER tiers, or BSF manual override). */
    @Column(name = "max_services")
    public Integer maxServices;

    /** NULL = unlimited. */
    @Column(name = "max_orgs")
    public Integer maxOrgs;

    @Column(name = "created_at", nullable = false)
    public Instant createdAt;

    /** Expiry; extendable via the maintenance-key lifecycle endpoints. */
    @Column(name = "expires_at", nullable = false)
    public Instant expiresAt;

    /** Owner-declared location string. Null if not provided at registration. */
    @Column(name = "registrar_location")
    public String registrarLocation;

    /** Geocoded latitude of registrarLocation. Null if not provided or geocoding failed. */
    @Column(name = "registrar_lat")
    public Double registrarLat;

    /** Geocoded longitude of registrarLocation. Null if not provided or geocoding failed. */
    @Column(name = "registrar_lon")
    public Double registrarLon;

    /** SHA-256 hash of the maintenance key. Used for lifecycle operations (extend, rotate). */
    @Column(name = "maintenance_key_hash", nullable = false)
    public String maintenanceKeyHash;
}
|
||||
+4
-1
@@ -18,9 +18,12 @@ public class ServiceEntity {
|
||||
@Column(columnDefinition = "uuid")
|
||||
public UUID id;
|
||||
|
||||
@Column(name = "endpoint_url", nullable = false, unique = true)
|
||||
@Column(name = "endpoint_url", nullable = false)
|
||||
public String endpointUrl;
|
||||
|
||||
@Column(name = "sandbox_id")
|
||||
public String sandboxId;
|
||||
|
||||
@Convert(converter = BsmPayloadConverter.class)
|
||||
@Column(name = "bsm_payload", columnDefinition = "jsonb", nullable = false)
|
||||
@ColumnTransformer(write = "?::jsonb")
|
||||
|
||||
+63
@@ -0,0 +1,63 @@
|
||||
package org.botstandards.apix.registry.filter;
|
||||
|
||||
import jakarta.annotation.Priority;
|
||||
import jakarta.ws.rs.Priorities;
|
||||
import jakarta.ws.rs.container.ContainerRequestContext;
|
||||
import jakarta.ws.rs.container.ContainerResponseContext;
|
||||
import jakarta.ws.rs.container.ContainerResponseFilter;
|
||||
import jakarta.ws.rs.ext.Provider;
|
||||
|
||||
/**
|
||||
* Sets Cache-Control on every response so Bunny.net edge nodes know what to
|
||||
* cache and for how long. Rules:
|
||||
*
|
||||
* GET / → public, max-age=60 (root nav links are stable)
|
||||
* GET /services, /devices → public, max-age=30 (capability search results)
|
||||
* GET /organizations → public, max-age=30
|
||||
* GET /q/* → no-store (health + metrics — never cache)
|
||||
* 4xx / 5xx responses → no-store (errors must not be served from edge)
|
||||
* Non-GET methods → no-store (writes must always reach origin)
|
||||
*
|
||||
* Bunny.net reads the Cache-Control max-age as the edge TTL when the pull zone
|
||||
* is configured with "Use Cache-Control headers" enabled (set in setup-bunnynet.sh).
|
||||
* Query-string variation is handled by the CDN pull zone config — Bunny.net caches
|
||||
* /services?capability=nlp and /services?capability=translation as separate entries.
|
||||
*/
|
||||
@Provider
|
||||
@Priority(Priorities.HEADER_DECORATOR)
|
||||
public class CacheControlFilter implements ContainerResponseFilter {
|
||||
|
||||
@Override
|
||||
public void filter(ContainerRequestContext req, ContainerResponseContext res) {
|
||||
if (!"GET".equals(req.getMethod())) {
|
||||
set(res, "no-store");
|
||||
return;
|
||||
}
|
||||
|
||||
String path = req.getUriInfo().getPath();
|
||||
|
||||
if (isInternalPath(path)) {
|
||||
set(res, "no-store");
|
||||
return;
|
||||
}
|
||||
|
||||
if (res.getStatus() >= 400) {
|
||||
set(res, "no-store");
|
||||
return;
|
||||
}
|
||||
|
||||
if (path.equals("/") || path.isEmpty()) {
|
||||
set(res, "public, max-age=60");
|
||||
} else {
|
||||
set(res, "public, max-age=30");
|
||||
}
|
||||
}
|
||||
|
||||
private boolean isInternalPath(String path) {
|
||||
return path.startsWith("q/") || path.startsWith("/q/");
|
||||
}
|
||||
|
||||
private void set(ContainerResponseContext res, String value) {
|
||||
res.getHeaders().putSingle("Cache-Control", value);
|
||||
}
|
||||
}
|
||||
+82
@@ -0,0 +1,82 @@
|
||||
package org.botstandards.apix.registry.filter;

import jakarta.annotation.Priority;
import jakarta.inject.Inject;
import jakarta.ws.rs.Priorities;
import jakarta.ws.rs.container.ContainerRequestContext;
import jakarta.ws.rs.container.ContainerRequestFilter;
import jakarta.ws.rs.core.Response;
import jakarta.ws.rs.core.UriBuilder;
import jakarta.ws.rs.ext.Provider;
import org.botstandards.apix.registry.normalisation.QueryNormalisationService;

import java.io.IOException;
import java.net.URI;

/**
 * Redirects non-canonical query strings to their canonical form so that
 * Bunny.net stores all semantically equivalent requests under one cache key.
 *
 * Three-step flow:
 *   1. Normalize parameter order and values (QueryNormalisationService)
 *   2. Compare canonical form to incoming URL
 *   3. If different → 302 to canonical URL; CDN then caches the final response
 *      under the canonical key. If already canonical → pass through unchanged.
 *
 * Why values must stay in the cache key:
 *   ?capability=nlp and ?capability=translation are different result sets.
 *   Removing values would collapse them into one entry — wrong data served.
 *   The canonical form normalises the representation (case, order, defaults)
 *   without losing the semantics encoded in the values.
 *
 * Why 302 (not 301):
 *   301 would itself be cached at the CDN edge. With 302, only the destination
 *   response (the canonical-URL 200) enters the CDN cache. Clients still
 *   learn the canonical form and subsequent requests skip the redirect.
 *
 * Paths covered: GET /services, GET /devices, GET /…/replacements.
 * All other paths — including /q/* health and non-GET methods — pass through.
 */
@Provider
@Priority(Priorities.USER)
public class CanonicalQueryFilter implements ContainerRequestFilter {

    @Inject
    QueryNormalisationService normalisationService;

    @Override
    public void filter(ContainerRequestContext ctx) throws IOException {
        // Only idempotent reads are canonicalised; writes pass straight through.
        if (!"GET".equals(ctx.getMethod())) return;

        String path = ctx.getUriInfo().getPath();
        String canonical = resolveCanonical(path, ctx);
        if (canonical == null) return; // path not subject to canonicalisation

        // NOTE(review): `incoming` is the percent-ENCODED raw query, while `canonical`
        // is rebuilt from DECODED parameter values. A value containing characters that
        // only survive in encoded form could never compare equal here, causing a 302 on
        // every hit for that URL. Presumably canonicalised values are plain slugs/enums
        // — confirm before relying on this equality.
        String incoming = rawQuery(ctx);
        if (canonical.equals(incoming)) return; // already canonical — pass through

        // 302 FOUND so the redirect itself is not cached at the edge (see class doc).
        ctx.abortWith(Response.status(Response.Status.FOUND)
                .location(canonicalUri(ctx, canonical))
                .build());
    }

    /**
     * Maps the request path to its canonical query string, or null when the
     * path is not one of the canonicalised endpoints.
     */
    private String resolveCanonical(String path, ContainerRequestContext ctx) {
        var q = ctx.getUriInfo().getQueryParameters();
        // JAX-RS may report the path with or without a leading slash; strip it once.
        String p = path.startsWith("/") ? path.substring(1) : path;
        if (p.equals("services")) return normalisationService.canonicalForServices(q);
        if (p.equals("devices")) return normalisationService.canonicalForDevices(q);
        if (p.endsWith("/replacements")) return normalisationService.canonicalForReplacements(q);
        return null;
    }

    /** Raw (still percent-encoded) query string of the incoming request; never null. */
    private String rawQuery(ContainerRequestContext ctx) {
        String raw = ctx.getUriInfo().getRequestUri().getRawQuery();
        return raw != null ? raw : "";
    }

    /**
     * Rebuilds the request URI with the canonical query. getAbsolutePath() carries
     * no query, so an empty canonical form yields a bare, query-less URI.
     */
    private URI canonicalUri(ContainerRequestContext ctx, String canonicalQuery) {
        UriBuilder b = UriBuilder.fromUri(ctx.getUriInfo().getAbsolutePath());
        if (!canonicalQuery.isEmpty()) b.replaceQuery(canonicalQuery);
        return b.build();
    }
}
|
||||
+104
@@ -0,0 +1,104 @@
|
||||
package org.botstandards.apix.registry.normalisation;

import jakarta.enterprise.context.ApplicationScoped;
import jakarta.ws.rs.core.MultivaluedMap;

import java.util.Locale;
import java.util.Map;
import java.util.TreeMap;

import static java.util.stream.Collectors.joining;

/**
 * Produces a canonical query string for agent-facing search endpoints.
 *
 * Rules applied per parameter:
 *   capability  → lowercase, trimmed; omitted if blank
 *   stage       → uppercase; omitted if equals the default "PRODUCTION"
 *   deviceClass → lowercase, trimmed; omitted if blank
 *   protocol    → uppercase, trimmed; omitted if blank
 *   minOLevel   → uppercase, trimmed; omitted if blank
 *
 * Unknown parameters are silently dropped — forward-compatibility.
 * Remaining parameters are sorted alphabetically to produce a stable cache key.
 *
 * The canonical form is what Bunny.net uses as the CDN cache key and what
 * Micrometer counters use as metric tag values. Any variant that normalises
 * to the same canonical form hits the same cache entry and increments the
 * same counter.
 *
 * Case mapping uses Locale.ROOT so the canonical form is byte-identical on
 * every server regardless of the JVM default locale (the default-locale
 * toLowerCase/toUpperCase would, e.g., map 'I' to a dotless 'ı' under a
 * Turkish locale, silently splitting the cache key between deployments).
 */
@ApplicationScoped
public class QueryNormalisationService {

    private static final String DEFAULT_STAGE = "PRODUCTION";

    /**
     * Returns the canonical query string for a /services search.
     * Empty string means no meaningful parameters — bare /services URL.
     */
    public String canonicalForServices(MultivaluedMap<String, String> raw) {
        Map<String, String> out = new TreeMap<>();
        put(out, "capability", normaliseCapability(raw.getFirst("capability")));
        putIfNotDefault(out, "stage", normaliseEnum(raw.getFirst("stage")), DEFAULT_STAGE);
        return render(out);
    }

    /**
     * Returns the canonical query string for a /devices search.
     * Empty string means no meaningful parameters — bare /devices URL.
     */
    public String canonicalForDevices(MultivaluedMap<String, String> raw) {
        Map<String, String> out = new TreeMap<>();
        put(out, "capability", normaliseCapability(raw.getFirst("capability")));
        put(out, "deviceClass", normaliseLower(raw.getFirst("deviceClass")));
        put(out, "protocol", normaliseEnum(raw.getFirst("protocol")));
        return render(out);
    }

    /**
     * Returns the canonical query string for a /{id}/replacements search.
     * Empty string means no filter parameters.
     */
    public String canonicalForReplacements(MultivaluedMap<String, String> raw) {
        Map<String, String> out = new TreeMap<>();
        put(out, "deviceClass", normaliseLower(raw.getFirst("deviceClass")));
        put(out, "minOLevel", normaliseEnum(raw.getFirst("minOLevel")));
        put(out, "protocol", normaliseEnum(raw.getFirst("protocol")));
        return render(out);
    }

    // ── Individual value normalisers — package-private for unit tests ──────────

    /**
     * Capability values currently share the generic lowercase rule; kept as a
     * separately named hook so capability-specific normalisation can be added
     * later without touching the other parameters.
     */
    String normaliseCapability(String v) {
        return normaliseLower(v);
    }

    /** Trim + locale-independent lowercase; null/blank collapse to null (parameter omitted). */
    String normaliseLower(String v) {
        if (v == null) return null;
        String s = v.strip().toLowerCase(Locale.ROOT);
        return s.isEmpty() ? null : s;
    }

    /** Trim + locale-independent uppercase; null/blank collapse to null (parameter omitted). */
    String normaliseEnum(String v) {
        if (v == null) return null;
        String s = v.strip().toUpperCase(Locale.ROOT);
        return s.isEmpty() ? null : s;
    }

    // ── Private helpers ────────────────────────────────────────────────────────

    /** Adds the entry only when normalisation yielded a value. */
    private void put(Map<String, String> map, String key, String value) {
        if (value != null) map.put(key, value);
    }

    /** Adds the entry only when present and different from the documented default. */
    private void putIfNotDefault(Map<String, String> map, String key, String value, String defaultValue) {
        if (value != null && !value.equals(defaultValue)) map.put(key, value);
    }

    /** TreeMap iteration order yields the alphabetically sorted, stable key=value&… form. */
    private String render(Map<String, String> params) {
        return params.entrySet().stream()
                .map(e -> e.getKey() + "=" + e.getValue())
                .collect(joining("&"));
    }
}
|
||||
+95
@@ -0,0 +1,95 @@
|
||||
package org.botstandards.apix.registry.resource;

import io.micrometer.core.instrument.MeterRegistry;
import jakarta.inject.Inject;
import jakarta.ws.rs.*;
import jakarta.ws.rs.core.MediaType;
import jakarta.ws.rs.core.Response;
import org.botstandards.apix.registry.dto.DeviceIndexResponse;
import org.botstandards.apix.registry.dto.DeviceIndexResponse.DeviceLinks;
import org.botstandards.apix.registry.dto.DeviceIndexResponse.DeviceLinks.LinkRef;
import org.botstandards.apix.registry.dto.ReplacementsResponse;
import org.botstandards.apix.registry.dto.ServiceResponse;
import org.botstandards.apix.registry.service.RegistryService;
import org.eclipse.microprofile.config.inject.ConfigProperty;

import java.util.List;
import java.util.UUID;

/**
 * Agent-facing /devices endpoints: HATEOAS navigation index, capability
 * search, lookup by id, and replacement discovery.
 */
@Path("/devices")
@Produces(MediaType.APPLICATION_JSON)
public class DeviceResource {

    /** Metric tag values are truncated to this length. */
    private static final int MAX_TAG_LENGTH = 64;

    @Inject
    RegistryService registryService;

    @Inject
    MeterRegistry meters;

    @ConfigProperty(name = "apix.registry.base-url")
    String baseUrl;

    /**
     * No query params → navigation document.
     * Any query param → device service search.
     *
     * An agent navigates here first, reads _links.search, fills the template,
     * then calls the resulting URL — which routes back to this same method.
     */
    @GET
    public Response index(
            @QueryParam("capability") String capability,
            @QueryParam("deviceClass") String deviceClass,
            @QueryParam("protocol") String protocol) {

        boolean searching = capability != null || deviceClass != null || protocol != null;
        if (!searching) {
            return Response.ok(buildIndex()).build();
        }

        List<ServiceResponse> matches = registryService
                .searchDevices(capability, deviceClass, protocol)
                .stream()
                .map(ServiceResponse::from)
                .toList();

        recordSearchMetrics(capability, deviceClass, protocol, matches.size());
        return Response.ok(matches).build();
    }

    /** Single device-service lookup; absence handling lives in requireDeviceById. */
    @GET
    @Path("/{id}")
    public ServiceResponse getById(@PathParam("id") UUID id) {
        return ServiceResponse.from(registryService.requireDeviceById(id));
    }

    /** Replacement candidates for a device service, optionally filtered. */
    @GET
    @Path("/{id}/replacements")
    public ReplacementsResponse getReplacements(
            @PathParam("id") UUID id,
            @QueryParam("minOLevel") String minOLevel,
            @QueryParam("deviceClass") String deviceClass,
            @QueryParam("protocol") String protocol) {
        // iotReady=true is implicit: the /devices path only surfaces IoT-compatible candidates
        return registryService.getReplacements(id, minOLevel, true, deviceClass, protocol);
    }

    /** Emits the search counter and result-size summary for one search request. */
    private void recordSearchMetrics(String capability, String deviceClass, String protocol, int resultCount) {
        meters.counter("apix.search.devices",
                "capability", tagValue(capability),
                "deviceClass", tagValue(deviceClass),
                "protocol", tagValue(protocol))
                .increment();
        meters.summary("apix.search.result_count",
                "resource", "devices",
                "capability", tagValue(capability))
                .record(resultCount);
    }

    /** Null/blank → "_none"; long values truncated so metric tags stay bounded in length. */
    private static String tagValue(String v) {
        if (v == null || v.isBlank()) {
            return "_none";
        }
        return v.length() > MAX_TAG_LENGTH ? v.substring(0, MAX_TAG_LENGTH) : v;
    }

    /** HATEOAS navigation document returned for a bare GET /devices. */
    private DeviceIndexResponse buildIndex() {
        return new DeviceIndexResponse(new DeviceLinks(
                new LinkRef(baseUrl + "/devices"),
                new LinkRef(baseUrl + "/devices{?capability,deviceClass,protocol}", true),
                new LinkRef(baseUrl + "/devices/{id}/replacements{?deviceClass,protocol,minOLevel}", true)
        ));
    }
}
|
||||
+68
@@ -0,0 +1,68 @@
|
||||
package org.botstandards.apix.registry.resource;
|
||||
|
||||
import jakarta.inject.Inject;
|
||||
import jakarta.ws.rs.GET;
|
||||
import jakarta.ws.rs.HeaderParam;
|
||||
import jakarta.ws.rs.Path;
|
||||
import jakarta.ws.rs.Produces;
|
||||
import jakarta.ws.rs.core.MediaType;
|
||||
import org.botstandards.apix.registry.dto.IndexResponse;
|
||||
import org.botstandards.apix.registry.dto.IndexResponse.RegistryLinks;
|
||||
import org.botstandards.apix.registry.dto.IndexResponse.RegistryLinks.LinkRef;
|
||||
import org.botstandards.apix.registry.dto.IndexResponse.RegistryStats;
|
||||
import org.botstandards.apix.registry.entity.SandboxEntity;
|
||||
import org.botstandards.apix.registry.service.RegistryService;
|
||||
import org.botstandards.apix.registry.service.SandboxService;
|
||||
import org.eclipse.microprofile.config.inject.ConfigProperty;
|
||||
|
||||
@Path("/")
|
||||
@Produces(MediaType.APPLICATION_JSON)
|
||||
public class IndexResource {
|
||||
|
||||
@Inject
|
||||
RegistryService registryService;
|
||||
|
||||
@Inject
|
||||
SandboxService sandboxService;
|
||||
|
||||
@ConfigProperty(name = "apix.registry.base-url")
|
||||
String baseUrl;
|
||||
|
||||
@ConfigProperty(name = "apix.registry.name", defaultValue = "APIX Registry")
|
||||
String registryName;
|
||||
|
||||
@ConfigProperty(name = "apix.registry.description",
|
||||
defaultValue = "The open autonomous agent service discovery registry. " +
|
||||
"Follow _links.services to browse, or _links.servicesSearch to filter by capability.")
|
||||
String registryDescription;
|
||||
|
||||
@GET
|
||||
public IndexResponse index(@HeaderParam("X-Api-Key") String apiKey) {
|
||||
var stats = new RegistryStats(
|
||||
registryService.countAll(),
|
||||
registryService.countLive()
|
||||
);
|
||||
|
||||
LinkRef sandboxLink = null;
|
||||
if (apiKey != null && !apiKey.isBlank()) {
|
||||
SandboxEntity sandbox = sandboxService.findByKey(apiKey);
|
||||
if (sandbox != null) {
|
||||
sandboxLink = new LinkRef(baseUrl + "/sandbox/" + sandbox.name);
|
||||
}
|
||||
}
|
||||
|
||||
var links = new RegistryLinks(
|
||||
new LinkRef(baseUrl + "/"),
|
||||
new LinkRef(baseUrl + "/services"),
|
||||
new LinkRef(baseUrl + "/services{?capability,stage,property}", true),
|
||||
new LinkRef(baseUrl + "/devices"),
|
||||
new LinkRef(baseUrl + "/organizations"),
|
||||
new LinkRef(baseUrl + "/q/health"),
|
||||
new LinkRef(baseUrl + "/q/openapi"),
|
||||
new LinkRef(baseUrl + "/sandbox/register"),
|
||||
new LinkRef(baseUrl + "/sandbox/feedback-schema"),
|
||||
sandboxLink
|
||||
);
|
||||
return new IndexResponse("0.1", registryName, registryDescription, stats, links);
|
||||
}
|
||||
}
|
||||
+280
@@ -0,0 +1,280 @@
|
||||
package org.botstandards.apix.registry.resource;
|
||||
|
||||
import io.micrometer.core.instrument.MeterRegistry;
|
||||
import jakarta.inject.Inject;
|
||||
import jakarta.validation.Valid;
|
||||
import jakarta.ws.rs.*;
|
||||
import jakarta.ws.rs.core.MediaType;
|
||||
import jakarta.ws.rs.core.Response;
|
||||
import org.botstandards.apix.common.BsmPayload;
|
||||
import org.botstandards.apix.common.SandboxDashboardResponse;
|
||||
import org.botstandards.apix.registry.dto.*;
|
||||
import org.botstandards.apix.registry.entity.SandboxEntity;
|
||||
import org.botstandards.apix.registry.service.SandboxService;
|
||||
import org.botstandards.apix.registry.service.SandboxService.SandboxCreationResult;
|
||||
import org.eclipse.microprofile.config.inject.ConfigProperty;
|
||||
import org.eclipse.microprofile.openapi.annotations.Operation;
|
||||
import org.eclipse.microprofile.openapi.annotations.security.SecurityRequirement;
|
||||
|
||||
import java.net.URI;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.UUID;
|
||||
|
||||
@Path("/sandbox")
|
||||
@Produces(MediaType.APPLICATION_JSON)
|
||||
@Consumes(MediaType.APPLICATION_JSON)
|
||||
public class SandboxResource {
|
||||
|
||||
@Inject
|
||||
SandboxService sandboxService;
|
||||
|
||||
@Inject
|
||||
MeterRegistry meters;
|
||||
|
||||
@ConfigProperty(name = "apix.registry.base-url")
|
||||
String baseUrl;
|
||||
|
||||
@ConfigProperty(name = "apix.portal.base-url", defaultValue = "")
|
||||
String portalBaseUrl;
|
||||
|
||||
@ConfigProperty(name = "apix.api-key", defaultValue = "")
|
||||
String adminApiKey;
|
||||
|
||||
@POST
|
||||
@Path("/register")
|
||||
@Operation(
|
||||
summary = "Create a sandbox namespace",
|
||||
description = "Creates an isolated test namespace. Returns the sandbox UUID and an API key " +
|
||||
"shown exactly once — store both immediately. The UUID is the permanent resource identifier " +
|
||||
"used in all subsequent requests. The name is a human-readable label only; it does not " +
|
||||
"need to be unique. Free tier: 30 days lifetime, 60 req/min."
|
||||
)
|
||||
public Response register(@Valid SandboxRegistrationRequest req) {
|
||||
SandboxCreationResult result = sandboxService.create(req.name(), req.contactEmail(), req.location());
|
||||
SandboxEntity sb = result.sandbox();
|
||||
|
||||
String dashboardUrl = portalBaseUrl.isBlank() ? null
|
||||
: portalBaseUrl + "/sandbox/" + sb.id;
|
||||
|
||||
var body = new SandboxRegistrationResponse(
|
||||
sb.id.toString(), sb.name, result.plainKey(), result.plainMaintenanceKey(),
|
||||
sb.tier, sb.ratePerMinute, sb.expiresAt,
|
||||
dashboardUrl,
|
||||
sandboxService.sandboxLinks(baseUrl + "/sandbox/" + sb.id, sb.id.toString()));
|
||||
|
||||
return Response.created(URI.create("/sandbox/" + sb.id)).entity(body).build();
|
||||
}
|
||||
|
||||
@GET
|
||||
@Path("/{uuid}")
|
||||
@Operation(
|
||||
summary = "Sandbox root — HATEOAS navigation and dashboard data",
|
||||
description = "Returns sandbox metadata, navigation links, usage stats, and agent visit data. " +
|
||||
"No authentication required. The UUID is the permanent resource identifier."
|
||||
)
|
||||
public SandboxDashboardResponse index(@PathParam("uuid") String uuidStr) {
|
||||
UUID id = parseUuid(uuidStr);
|
||||
SandboxDashboardResponse dashboard = sandboxService.getDashboard(id);
|
||||
sandboxService.recordUsage(id.toString(), SandboxService.EVENT_SANDBOX_VIEWED);
|
||||
meters.counter("apix.sandbox.views", "sandbox", id.toString()).increment();
|
||||
return dashboard;
|
||||
}
|
||||
|
||||
@POST
|
||||
@Path("/{uuid}/services")
|
||||
@SecurityRequirement(name = "SandboxApiKey")
|
||||
@Operation(
|
||||
summary = "Register a service in the sandbox",
|
||||
description = "Registers a test service. No KYC or O-level enforcement. " +
|
||||
"Requires X-Api-Key matching this sandbox's key."
|
||||
)
|
||||
public Response registerService(@PathParam("uuid") String uuidStr,
|
||||
@Valid BsmPayload payload,
|
||||
@HeaderParam("X-Api-Key") String apiKey,
|
||||
@HeaderParam("X-Forwarded-For") String forwardedFor) {
|
||||
UUID id = parseUuid(uuidStr);
|
||||
SandboxEntity sb = sandboxService.requireAuth(id, apiKey);
|
||||
var service = sandboxService.registerService(sb.id.toString(), payload);
|
||||
sandboxService.recordUsage(sb.id.toString(), SandboxService.EVENT_SERVICE_REGISTERED);
|
||||
sandboxService.recordAgentVisit(sb.id.toString(), forwardedFor);
|
||||
meters.counter("apix.sandbox.services.registered", "sandbox", sb.id.toString()).increment();
|
||||
return Response.created(URI.create("/sandbox/" + sb.id + "/services/" + service.id))
|
||||
.entity(Map.of("id", service.id.toString()))
|
||||
.build();
|
||||
}
|
||||
|
||||
@GET
|
||||
@Path("/{uuid}/services")
|
||||
@Operation(
|
||||
summary = "List or search services in the sandbox",
|
||||
description = "Returns services registered in this sandbox. " +
|
||||
"Optionally filter by ?capability=, ?stage=, and ?property=key:value. " +
|
||||
"Multiple ?property= parameters are ANDed together. No authentication required."
|
||||
)
|
||||
public List<ServiceResponse> listServices(
|
||||
@PathParam("uuid") String uuidStr,
|
||||
@QueryParam("capability") String capability,
|
||||
@QueryParam("stage") String stage,
|
||||
@QueryParam("property") List<String> properties,
|
||||
@HeaderParam("X-Forwarded-For") String forwardedFor) {
|
||||
UUID id = parseUuid(uuidStr);
|
||||
SandboxEntity sb = sandboxService.requireById(id);
|
||||
if (capability != null && !capability.isBlank()) {
|
||||
sandboxService.recordUsage(sb.id.toString(), SandboxService.EVENT_SERVICE_SEARCHED);
|
||||
sandboxService.recordAgentVisit(sb.id.toString(), forwardedFor);
|
||||
meters.counter("apix.sandbox.services.searched", "sandbox", sb.id.toString()).increment();
|
||||
return sandboxService.searchServices(sb.id.toString(), capability, stage, properties);
|
||||
}
|
||||
sandboxService.recordUsage(sb.id.toString(), SandboxService.EVENT_SERVICE_LISTED);
|
||||
meters.counter("apix.sandbox.services.listed", "sandbox", sb.id.toString()).increment();
|
||||
return sandboxService.listServices(sb.id.toString());
|
||||
}
|
||||
|
||||
@GET
|
||||
@Path("/{uuid}/telemetry")
|
||||
@SecurityRequirement(name = "SandboxApiKey")
|
||||
@Operation(
|
||||
summary = "Sandbox usage statistics",
|
||||
description = "Returns cumulative request counts by event type, last activity timestamp, " +
|
||||
"and tier metadata. Requires X-Api-Key matching this sandbox's key."
|
||||
)
|
||||
public SandboxTelemetryResponse telemetry(@PathParam("uuid") String uuidStr,
|
||||
@HeaderParam("X-Api-Key") String apiKey) {
|
||||
UUID id = parseUuid(uuidStr);
|
||||
SandboxEntity sb = sandboxService.requireAuth(id, apiKey);
|
||||
return sandboxService.getTelemetry(sb, baseUrl);
|
||||
}
|
||||
|
||||
@GET
|
||||
@Path("/feedback-schema")
|
||||
@Operation(
|
||||
summary = "Agent experience feedback schema",
|
||||
description = "Returns the rated dimensions with question text and scale labels. " +
|
||||
"Agents read this before submitting feedback. No authentication required."
|
||||
)
|
||||
public FeedbackSchemaResponse feedbackSchema() {
|
||||
return SandboxService.feedbackSchema(baseUrl);
|
||||
}
|
||||
|
||||
@POST
|
||||
@Path("/{uuid}/feedback")
|
||||
@Operation(
|
||||
summary = "Submit agent experience feedback",
|
||||
description = "Records dimension scores (0–10) for this sandbox. No authentication required. " +
|
||||
"Unknown dimension keys are ignored. At least one valid dimension key is required."
|
||||
)
|
||||
public Response submitFeedback(@PathParam("uuid") String uuidStr,
|
||||
@Valid FeedbackSubmissionRequest req) {
|
||||
UUID id = parseUuid(uuidStr);
|
||||
SandboxEntity sb = sandboxService.requireById(id);
|
||||
sandboxService.submitFeedback(sb.id.toString(), req);
|
||||
meters.counter("apix.sandbox.feedback.submitted", "sandbox", sb.id.toString()).increment();
|
||||
return Response.accepted()
|
||||
.entity(Map.of(
|
||||
"message", "Feedback recorded. Thank you.",
|
||||
"_links", Map.of(
|
||||
"schema", baseUrl + "/sandbox/feedback-schema",
|
||||
"sandbox", baseUrl + "/sandbox/" + sb.id)))
|
||||
.build();
|
||||
}
|
||||
|
||||
@GET
|
||||
@Path("/{uuid}/feedback")
|
||||
@SecurityRequirement(name = "SandboxApiKey")
|
||||
@Operation(
|
||||
summary = "Aggregated feedback results",
|
||||
description = "Returns average scores per dimension across all submissions for this sandbox. " +
|
||||
"Requires X-Api-Key matching this sandbox's key."
|
||||
)
|
||||
public FeedbackAggregateResponse getFeedback(@PathParam("uuid") String uuidStr,
|
||||
@HeaderParam("X-Api-Key") String apiKey) {
|
||||
UUID id = parseUuid(uuidStr);
|
||||
SandboxEntity sb = sandboxService.requireAuth(id, apiKey);
|
||||
return sandboxService.getAggregatedFeedback(sb);
|
||||
}
|
||||
|
||||
@PATCH
|
||||
@Path("/admin/{uuid}/tier")
|
||||
@Operation(summary = "Promote sandbox tier (admin only)",
|
||||
description = "Changes a sandbox tier, updating rate limits, caps, and expiry. " +
|
||||
"Requires X-Admin-Key matching the global registry admin key.")
|
||||
public Response promoteTier(@PathParam("uuid") String uuidStr,
|
||||
@HeaderParam("X-Admin-Key") String adminKey,
|
||||
Map<String, String> body) {
|
||||
if (adminApiKey.isBlank() || !adminApiKey.equals(adminKey)) {
|
||||
return Response.status(401)
|
||||
.entity(Map.of("message", "Invalid or missing admin key"))
|
||||
.build();
|
||||
}
|
||||
String newTier = body == null ? null : body.get("tier");
|
||||
if (newTier == null || newTier.isBlank()) {
|
||||
return Response.status(400)
|
||||
.entity(Map.of("message", "Body must contain 'tier'"))
|
||||
.build();
|
||||
}
|
||||
UUID id = parseUuid(uuidStr);
|
||||
SandboxEntity sb = sandboxService.requireById(id);
|
||||
sandboxService.promoteTier(sb, newTier.toUpperCase());
|
||||
return Response.ok(Map.of(
|
||||
"sandboxId", sb.id.toString(),
|
||||
"tier", sb.tier,
|
||||
"expiresAt", sb.expiresAt.toString(),
|
||||
"ratePerMinute", sb.ratePerMinute))
|
||||
.build();
|
||||
}
|
||||
|
||||
@PATCH
|
||||
@Path("/{uuid}/extend")
|
||||
@SecurityRequirement(name = "SandboxMaintenanceKey")
|
||||
@Operation(
|
||||
summary = "Extend sandbox expiry",
|
||||
description = "Resets the expiry to now + tier lifetime (e.g. 30 days for FREE). " +
|
||||
"Can be called on an already-expired sandbox to reactivate it. " +
|
||||
"Requires X-Maintenance-Key returned at registration."
|
||||
)
|
||||
public Response extend(@PathParam("uuid") String uuidStr,
|
||||
@HeaderParam("X-Maintenance-Key") String maintenanceKey) {
|
||||
UUID id = parseUuid(uuidStr);
|
||||
SandboxEntity sb = sandboxService.requireMaintenanceAuth(id, maintenanceKey);
|
||||
java.time.Instant newExpiry = sandboxService.extendExpiry(sb);
|
||||
return Response.ok(Map.of(
|
||||
"expiresAt", newExpiry.toString(),
|
||||
"_links", Map.of("sandbox", baseUrl + "/sandbox/" + sb.id)))
|
||||
.build();
|
||||
}
|
||||
|
||||
@PATCH
|
||||
@Path("/{uuid}/api-key")
|
||||
@SecurityRequirement(name = "SandboxMaintenanceKey")
|
||||
@Operation(
|
||||
summary = "Rotate the sandbox API key",
|
||||
description = "Invalidates the current API key and issues a new one. " +
|
||||
"All agents must be updated with the new key. " +
|
||||
"The new key is shown exactly once — store it immediately. " +
|
||||
"Requires X-Maintenance-Key returned at registration."
|
||||
)
|
||||
public Response rotateApiKey(@PathParam("uuid") String uuidStr,
|
||||
@HeaderParam("X-Maintenance-Key") String maintenanceKey) {
|
||||
UUID id = parseUuid(uuidStr);
|
||||
SandboxEntity sb = sandboxService.requireMaintenanceAuth(id, maintenanceKey);
|
||||
String newKey = sandboxService.rotateApiKey(sb);
|
||||
return Response.ok(Map.of(
|
||||
"apiKey", newKey,
|
||||
"message", "New API key issued. Store it immediately — it will not be shown again.",
|
||||
"_links", Map.of("sandbox", baseUrl + "/sandbox/" + sb.id)))
|
||||
.build();
|
||||
}
|
||||
|
||||
// ── Private ───────────────────────────────────────────────────────────────
|
||||
|
||||
/** Returns 404 (not 400) for an unparseable UUID — it simply doesn't exist as a resource. */
|
||||
private UUID parseUuid(String s) {
|
||||
try {
|
||||
return UUID.fromString(s);
|
||||
} catch (IllegalArgumentException e) {
|
||||
throw new WebApplicationException(
|
||||
Response.status(404).entity(Map.of("message", "Sandbox not found")).build());
|
||||
}
|
||||
}
|
||||
}
|
||||
+83
-3
@@ -1,5 +1,6 @@
|
||||
package org.botstandards.apix.registry.resource;
|
||||
|
||||
import io.micrometer.core.instrument.MeterRegistry;
|
||||
import jakarta.inject.Inject;
|
||||
import jakarta.validation.Valid;
|
||||
import jakarta.ws.rs.*;
|
||||
@@ -12,6 +13,9 @@ import org.botstandards.apix.registry.dto.ServiceResponse;
|
||||
import org.botstandards.apix.registry.dto.VersionHistoryEntry;
|
||||
import org.botstandards.apix.registry.service.RegistryService;
|
||||
import org.eclipse.microprofile.config.inject.ConfigProperty;
|
||||
import org.eclipse.microprofile.openapi.annotations.Operation;
|
||||
import org.eclipse.microprofile.openapi.annotations.parameters.Parameter;
|
||||
import org.eclipse.microprofile.openapi.annotations.security.SecurityRequirement;
|
||||
|
||||
import java.net.URI;
|
||||
import java.util.List;
|
||||
@@ -26,10 +30,20 @@ public class ServiceResource {
|
||||
@Inject
|
||||
RegistryService registryService;
|
||||
|
||||
@Inject
|
||||
MeterRegistry meters;
|
||||
|
||||
@ConfigProperty(name = "apix.api-key")
|
||||
String apiKey;
|
||||
|
||||
@POST
|
||||
@SecurityRequirement(name = "ApiKey")
|
||||
@Operation(
|
||||
summary = "Register or update a service",
|
||||
description = "Registers a new service or updates an existing one (UPSERT keyed on endpoint URL). " +
|
||||
"Requires the X-Api-Key header. Returns the UUID of the created or updated service. " +
|
||||
"The service starts at stage=DEVELOPMENT and oLevel=UNVERIFIED unless specified in the payload."
|
||||
)
|
||||
public Response register(@Valid BsmPayload payload, @HeaderParam("X-Api-Key") String key) {
|
||||
requireKey(key);
|
||||
var service = registryService.register(payload);
|
||||
@@ -40,12 +54,23 @@ public class ServiceResource {
|
||||
|
||||
@GET
|
||||
@Path("/{id}")
|
||||
@Operation(
|
||||
summary = "Get service by ID",
|
||||
description = "Returns the full service record including BSM payload, verification level, liveness status, and lifecycle metadata."
|
||||
)
|
||||
public ServiceResponse getById(@PathParam("id") UUID id) {
|
||||
return ServiceResponse.from(registryService.requireById(id));
|
||||
}
|
||||
|
||||
@PATCH
|
||||
@Path("/{id}")
|
||||
@SecurityRequirement(name = "ApiKey")
|
||||
@Operation(
|
||||
summary = "Update service metadata",
|
||||
description = "Partially updates a service record. Only fields present in the request body are changed. " +
|
||||
"Stage transitions (e.g. DEVELOPMENT → PRODUCTION, PRODUCTION → DEPRECATED) are validated: " +
|
||||
"transitioning to DEPRECATED requires sunsetAt to be set; DECOMMISSIONED requires sunsetAt to have passed."
|
||||
)
|
||||
public ServiceResponse patch(@PathParam("id") UUID id,
|
||||
ServicePatchRequest req,
|
||||
@HeaderParam("X-Api-Key") String key) {
|
||||
@@ -54,19 +79,54 @@ public class ServiceResource {
|
||||
}
|
||||
|
||||
@GET
|
||||
public List<ServiceResponse> search(@QueryParam("capability") String capability,
|
||||
@QueryParam("stage") String stage) {
|
||||
@Operation(
|
||||
summary = "Search services by capability and optional extension properties",
|
||||
description = "Returns services matching the given capability keyword. " +
|
||||
"Omitting ?stage= defaults to PRODUCTION only — this is the standard agent query. " +
|
||||
"Use ?stage=DEVELOPMENT or ?stage=BETA to discover pre-production services. " +
|
||||
"Capability values are lowercase kebab-case strings defined by the registrant (e.g. nlp, translation, speech-to-text). " +
|
||||
"The search matches exact capability tokens, not substrings. " +
|
||||
"Use ?property=key:value to filter by custom extension properties stored in the service's 'extensions' object. " +
|
||||
"Multiple ?property= parameters are ANDed together. " +
|
||||
"Example: ?capability=translation&property=region:eu&property=dataResidency:DE"
|
||||
)
|
||||
public List<ServiceResponse> search(
|
||||
@Parameter(description = "Capability keyword to search for (e.g. nlp, translation, speech-to-text). Required.", example = "nlp")
|
||||
@QueryParam("capability") String capability,
|
||||
@Parameter(description = "Lifecycle stage filter. Defaults to PRODUCTION if omitted. Valid values: DEVELOPMENT, BETA, PRODUCTION, DEPRECATED.", example = "PRODUCTION")
|
||||
@QueryParam("stage") String stage,
|
||||
@Parameter(description = "Extension property filter in key:value format. Matches against the service's extensions object. Repeatable for AND logic.", example = "region:eu")
|
||||
@QueryParam("property") List<String> properties) {
|
||||
if (capability == null || capability.isBlank()) {
|
||||
throw new BadRequestException("capability query parameter is required");
|
||||
}
|
||||
return registryService.search(capability, stage).stream()
|
||||
var results = registryService.search(capability, stage, properties).stream()
|
||||
.map(ServiceResponse::from)
|
||||
.toList();
|
||||
|
||||
String stageTag = (stage != null && !stage.isBlank()) ? stage.toUpperCase() : "PRODUCTION";
|
||||
meters.counter("apix.search.services",
|
||||
"capability", tv(capability),
|
||||
"stage", stageTag)
|
||||
.increment();
|
||||
meters.summary("apix.search.result_count",
|
||||
"resource", "services",
|
||||
"capability", tv(capability))
|
||||
.record(results.size());
|
||||
|
||||
return results;
|
||||
}
|
||||
|
||||
@PATCH
|
||||
@Path("/{id}/olevel")
|
||||
@SecurityRequirement(name = "ApiKey")
|
||||
@Operation(
|
||||
summary = "Set verification level",
|
||||
description = "Assigns an O-level to a service after the registry operator has completed verification. " +
|
||||
"O-levels progress from UNVERIFIED → IDENTITY_VERIFIED → LEGAL_ENTITY_VERIFIED → HYGIENE_VERIFIED → OPERATIONALLY_VERIFIED → AUDITED."
|
||||
)
|
||||
public ServiceResponse setOLevel(@PathParam("id") UUID id,
|
||||
@Parameter(description = "Target O-level value.", example = "IDENTITY_VERIFIED")
|
||||
@QueryParam("level") String level,
|
||||
@HeaderParam("X-Api-Key") String key) {
|
||||
requireKey(key);
|
||||
@@ -76,10 +136,20 @@ public class ServiceResource {
|
||||
|
||||
@GET
|
||||
@Path("/{id}/replacements")
|
||||
@Operation(
|
||||
summary = "Find replacement services",
|
||||
description = "Returns services that declare themselves as replacements for the given service ID. " +
|
||||
"Use this when a service is DEPRECATED or DECOMMISSIONED to find the recommended migration target. " +
|
||||
"Optionally filter by minimum O-level, IoT readiness, device class, or protocol."
|
||||
)
|
||||
public Response getReplacements(@PathParam("id") UUID id,
|
||||
@Parameter(description = "Minimum O-level of replacement candidates.", example = "IDENTITY_VERIFIED")
|
||||
@QueryParam("minOLevel") String minOLevel,
|
||||
@Parameter(description = "If true, only return services with an IoT profile.")
|
||||
@QueryParam("iotReady") Boolean iotReady,
|
||||
@Parameter(description = "Filter by IoT device class (e.g. sensor, actuator, gateway).")
|
||||
@QueryParam("deviceClass") String deviceClass,
|
||||
@Parameter(description = "Filter by IoT protocol (e.g. MQTT, AMQP, HTTP).")
|
||||
@QueryParam("protocol") String protocol) {
|
||||
ReplacementsResponse body = registryService.getReplacements(id, minOLevel, iotReady, deviceClass, protocol);
|
||||
return Response.ok(body)
|
||||
@@ -89,10 +159,20 @@ public class ServiceResource {
|
||||
|
||||
@GET
|
||||
@Path("/{id}/history")
|
||||
@Operation(
|
||||
summary = "Get version history",
|
||||
description = "Returns the audit trail of all changes to a service record — registrations, BSM updates, stage transitions, O-level assignments, and sunset declarations — in reverse chronological order."
|
||||
)
|
||||
public List<VersionHistoryEntry> getHistory(@PathParam("id") UUID id) {
|
||||
return registryService.getHistory(id);
|
||||
}
|
||||
|
||||
// Cap tag values to prevent high-cardinality explosion from malformed inputs
|
||||
private static String tv(String v) {
|
||||
if (v == null || v.isBlank()) return "_none";
|
||||
return v.length() > 64 ? v.substring(0, 64) : v;
|
||||
}
|
||||
|
||||
private void requireKey(String provided) {
|
||||
if (!apiKey.equals(provided)) {
|
||||
throw new NotAuthorizedException(
|
||||
|
||||
@@ -0,0 +1,108 @@
|
||||
package org.botstandards.apix.registry.service;
|
||||
|
||||
import com.fasterxml.jackson.databind.JsonNode;
|
||||
import com.fasterxml.jackson.databind.ObjectMapper;
|
||||
import jakarta.enterprise.context.ApplicationScoped;
|
||||
import jakarta.inject.Inject;
|
||||
import org.jboss.logging.Logger;
|
||||
|
||||
import java.net.URI;
|
||||
import java.net.URLEncoder;
|
||||
import java.net.http.HttpClient;
|
||||
import java.net.http.HttpRequest;
|
||||
import java.net.http.HttpResponse;
|
||||
import java.nio.charset.StandardCharsets;
|
||||
import java.time.Duration;
|
||||
import java.util.Optional;
|
||||
|
||||
@ApplicationScoped
|
||||
public class GeoService {
|
||||
|
||||
private static final Logger LOG = Logger.getLogger(GeoService.class);
|
||||
|
||||
@Inject
|
||||
ObjectMapper mapper;
|
||||
|
||||
private final HttpClient http = HttpClient.newBuilder()
|
||||
.connectTimeout(Duration.ofSeconds(3))
|
||||
.build();
|
||||
|
||||
/**
|
||||
* Geocodes a free-text location string (e.g. "Berlin, Germany") to lat/lon via Nominatim.
|
||||
* Returns empty if the location is null, blank, or the lookup fails.
|
||||
*/
|
||||
public Optional<double[]> geocodeLocation(String location) {
|
||||
if (location == null || location.isBlank()) return Optional.empty();
|
||||
try {
|
||||
String encoded = URLEncoder.encode(location.trim(), StandardCharsets.UTF_8);
|
||||
HttpRequest req = HttpRequest.newBuilder(
|
||||
URI.create("https://nominatim.openstreetmap.org/search?q=" + encoded + "&format=json&limit=1"))
|
||||
.header("User-Agent", "APIX-Registry/1.0 (api-index.org)")
|
||||
.timeout(Duration.ofSeconds(5))
|
||||
.GET()
|
||||
.build();
|
||||
HttpResponse<String> resp = http.send(req, HttpResponse.BodyHandlers.ofString());
|
||||
if (resp.statusCode() != 200) return Optional.empty();
|
||||
JsonNode root = mapper.readTree(resp.body());
|
||||
if (root.isEmpty()) return Optional.empty();
|
||||
JsonNode first = root.get(0);
|
||||
return Optional.of(new double[]{
|
||||
first.get("lat").asDouble(),
|
||||
first.get("lon").asDouble()
|
||||
});
|
||||
} catch (Exception e) {
|
||||
LOG.warnf("Geocoding failed for '%s': %s", location, e.getMessage());
|
||||
return Optional.empty();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Resolves a client IP (from X-Forwarded-For) to lat/lon via ip-api.com.
|
||||
* Returns empty for private/loopback IPs, null input, or lookup failure.
|
||||
* Raw IP is passed to ip-api.com only; the returned coordinates are what gets persisted.
|
||||
*/
|
||||
public Optional<double[]> geolocateIp(String xForwardedFor) {
|
||||
String ip = extractClientIp(xForwardedFor);
|
||||
if (ip == null || isPrivateOrLoopback(ip)) return Optional.empty();
|
||||
try {
|
||||
HttpRequest req = HttpRequest.newBuilder(
|
||||
URI.create("http://ip-api.com/json/" + ip + "?fields=status,lat,lon"))
|
||||
.timeout(Duration.ofSeconds(3))
|
||||
.GET()
|
||||
.build();
|
||||
HttpResponse<String> resp = http.send(req, HttpResponse.BodyHandlers.ofString());
|
||||
if (resp.statusCode() != 200) return Optional.empty();
|
||||
JsonNode node = mapper.readTree(resp.body());
|
||||
if (!"success".equals(node.path("status").asText())) return Optional.empty();
|
||||
return Optional.of(new double[]{
|
||||
node.get("lat").asDouble(),
|
||||
node.get("lon").asDouble()
|
||||
});
|
||||
} catch (Exception e) {
|
||||
LOG.debugf("IP geolocation failed for %s: %s", ip, e.getMessage());
|
||||
return Optional.empty();
|
||||
}
|
||||
}
|
||||
|
||||
/** Takes the leftmost IP from a comma-separated X-Forwarded-For header. */
|
||||
public static String extractClientIp(String xForwardedFor) {
|
||||
if (xForwardedFor == null || xForwardedFor.isBlank()) return null;
|
||||
return xForwardedFor.split(",")[0].trim();
|
||||
}
|
||||
|
||||
static boolean isPrivateOrLoopback(String ip) {
|
||||
if (ip.equals("::1") || ip.startsWith("127.") || ip.equalsIgnoreCase("localhost")) return true;
|
||||
if (ip.startsWith("10.") || ip.startsWith("192.168.")) return true;
|
||||
// 172.16.0.0/12
|
||||
if (ip.startsWith("172.")) {
|
||||
String[] parts = ip.split("\\.");
|
||||
if (parts.length >= 2) {
|
||||
try {
|
||||
int second = Integer.parseInt(parts[1]);
|
||||
if (second >= 16 && second <= 31) return true;
|
||||
} catch (NumberFormatException ignored) {}
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
}
|
||||
+1
-1
@@ -594,7 +594,7 @@ public class OrganizationService {
|
||||
|
||||
private void resetTanCountIfNeeded(OrganizationEntity org, Instant now) {
|
||||
if (org.tanLastRequestedAt != null
|
||||
&& Duration.between(org.tanLastRequestedAt, now).toHours() >= 24) {
|
||||
&& Duration.between(org.tanLastRequestedAt, now).compareTo(Duration.ofHours(24)) > 0) {
|
||||
org.tanRequestCount24h = 0;
|
||||
}
|
||||
}
|
||||
|
||||
+76
-8
@@ -22,6 +22,7 @@ import org.botstandards.apix.registry.entity.ServiceVersionEntity;
|
||||
import java.time.Instant;
|
||||
import java.util.*;
|
||||
import java.util.stream.Stream;
|
||||
import jakarta.persistence.Query;
|
||||
|
||||
@ApplicationScoped
|
||||
public class RegistryService {
|
||||
@@ -35,7 +36,7 @@ public class RegistryService {
|
||||
@Transactional
|
||||
public ServiceEntity register(BsmPayload payload) {
|
||||
long existing = ((Number) em.createNativeQuery(
|
||||
"SELECT COUNT(*) FROM services WHERE endpoint_url = :url")
|
||||
"SELECT COUNT(*) FROM services WHERE endpoint_url = :url AND sandbox_id IS NULL")
|
||||
.setParameter("url", payload.endpoint())
|
||||
.getSingleResult()).longValue();
|
||||
if (existing > 0) {
|
||||
@@ -126,20 +127,42 @@ public class RegistryService {
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
public List<ServiceEntity> search(String capability, String stage) {
|
||||
public List<ServiceEntity> search(String capability, String stage, List<String> properties) {
|
||||
ServiceStage targetStage = stage != null
|
||||
? ServiceStage.valueOf(stage.toUpperCase())
|
||||
: ServiceStage.PRODUCTION;
|
||||
|
||||
return em.createNativeQuery(
|
||||
List<String[]> props = parsePropertyFilters(properties);
|
||||
StringBuilder sql = new StringBuilder(
|
||||
"SELECT s.* FROM services s " +
|
||||
"WHERE s.bsm_payload @> jsonb_build_object('capabilities', jsonb_build_array(:cap)) " +
|
||||
"AND s.registry_status = 'ACTIVE' " +
|
||||
"AND s.service_stage = :stage",
|
||||
ServiceEntity.class)
|
||||
"AND s.service_stage = :stage " +
|
||||
"AND s.sandbox_id IS NULL");
|
||||
for (int i = 0; i < props.size(); i++) {
|
||||
sql.append(" AND s.bsm_payload -> 'extensions' ->> :propKey").append(i)
|
||||
.append(" = :propValue").append(i);
|
||||
}
|
||||
|
||||
Query q = em.createNativeQuery(sql.toString(), ServiceEntity.class)
|
||||
.setParameter("cap", capability)
|
||||
.setParameter("stage", targetStage.name())
|
||||
.getResultList();
|
||||
.setParameter("stage", targetStage.name());
|
||||
for (int i = 0; i < props.size(); i++) {
|
||||
q.setParameter("propKey" + i, props.get(i)[0]);
|
||||
q.setParameter("propValue" + i, props.get(i)[1]);
|
||||
}
|
||||
return q.getResultList();
|
||||
}
|
||||
|
||||
static List<String[]> parsePropertyFilters(List<String> properties) {
|
||||
if (properties == null || properties.isEmpty()) return List.of();
|
||||
List<String[]> result = new ArrayList<>();
|
||||
for (String p : properties) {
|
||||
int colon = p.indexOf(':');
|
||||
if (colon < 1) continue;
|
||||
result.add(new String[]{ p.substring(0, colon), p.substring(colon + 1) });
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
@@ -190,6 +213,50 @@ public class RegistryService {
|
||||
);
|
||||
}
|
||||
|
||||
public ServiceEntity requireDeviceById(UUID id) {
|
||||
ServiceEntity e = requireById(id);
|
||||
if (e.iotProfile == null) {
|
||||
throw new NotFoundException("Device service not found: " + id);
|
||||
}
|
||||
return e;
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
public List<ServiceEntity> searchDevices(String capability, String deviceClass, String protocol) {
|
||||
StringBuilder sql = new StringBuilder(
|
||||
"SELECT s.* FROM services s " +
|
||||
"INNER JOIN iot_profiles ip ON ip.service_id = s.id " +
|
||||
"WHERE s.registry_status = 'ACTIVE' AND s.service_stage = 'PRODUCTION'");
|
||||
if (capability != null) {
|
||||
sql.append(" AND s.bsm_payload @> jsonb_build_object('capabilities', jsonb_build_array(:cap))");
|
||||
}
|
||||
jakarta.persistence.Query q = em.createNativeQuery(sql.toString(), ServiceEntity.class);
|
||||
if (capability != null) q.setParameter("cap", capability);
|
||||
|
||||
Stream<ServiceEntity> stream = ((List<ServiceEntity>) q.getResultList()).stream();
|
||||
if (deviceClass != null) {
|
||||
stream = stream.filter(s -> s.iotProfile.deviceClasses != null
|
||||
&& s.iotProfile.deviceClasses.contains(deviceClass));
|
||||
}
|
||||
if (protocol != null) {
|
||||
stream = stream.filter(s -> s.iotProfile.protocols != null
|
||||
&& s.iotProfile.protocols.contains(protocol));
|
||||
}
|
||||
return stream.toList();
|
||||
}
|
||||
|
||||
public long countAll() {
|
||||
return ((Number) em.createNativeQuery(
|
||||
"SELECT COUNT(*) FROM services WHERE registry_status = 'ACTIVE' AND sandbox_id IS NULL")
|
||||
.getSingleResult()).longValue();
|
||||
}
|
||||
|
||||
public long countLive() {
|
||||
return ((Number) em.createNativeQuery(
|
||||
"SELECT COUNT(*) FROM services WHERE registry_status = 'ACTIVE' AND liveness_status = 'UP' AND sandbox_id IS NULL")
|
||||
.getSingleResult()).longValue();
|
||||
}
|
||||
|
||||
public List<VersionHistoryEntry> getHistory(UUID id) {
|
||||
requireById(id);
|
||||
|
||||
@@ -281,7 +348,8 @@ public class RegistryService {
|
||||
r.locked() != null ? r.locked() : old.locked(),
|
||||
r.sunsetAt() != null ? r.sunsetAt() : old.sunsetAt(),
|
||||
r.migrationGuideUrl() != null ? r.migrationGuideUrl() : old.migrationGuideUrl(),
|
||||
r.replacesServiceIds() != null ? r.replacesServiceIds() : old.replacesServiceIds()
|
||||
r.replacesServiceIds() != null ? r.replacesServiceIds() : old.replacesServiceIds(),
|
||||
old.extensions()
|
||||
);
|
||||
if (r.endpoint() != null) e.endpointUrl = r.endpoint();
|
||||
if (r.registrantOrgType() != null) e.registrantOrgType = r.registrantOrgType();
|
||||
|
||||
+582
@@ -0,0 +1,582 @@
|
||||
package org.botstandards.apix.registry.service;
|
||||
|
||||
import jakarta.enterprise.context.ApplicationScoped;
|
||||
import jakarta.inject.Inject;
|
||||
import jakarta.persistence.EntityManager;
|
||||
import jakarta.transaction.Transactional;
|
||||
import jakarta.ws.rs.WebApplicationException;
|
||||
import jakarta.ws.rs.core.Response;
|
||||
import org.botstandards.apix.common.*;
|
||||
import org.botstandards.apix.registry.dto.*;
|
||||
import org.botstandards.apix.registry.dto.SandboxLinks.LinkRef;
|
||||
import org.botstandards.apix.registry.entity.SandboxEntity;
|
||||
import org.botstandards.apix.registry.entity.ServiceEntity;
|
||||
import org.botstandards.apix.registry.entity.ServiceVersionEntity;
|
||||
|
||||
import org.eclipse.microprofile.config.inject.ConfigProperty;
|
||||
|
||||
import java.nio.charset.StandardCharsets;
|
||||
import java.security.MessageDigest;
|
||||
import java.security.NoSuchAlgorithmException;
|
||||
import java.security.SecureRandom;
|
||||
import java.time.Instant;
|
||||
import java.time.temporal.ChronoUnit;
|
||||
import java.util.*;
|
||||
import java.util.HexFormat;
|
||||
|
||||
@ApplicationScoped
|
||||
public class SandboxService {
|
||||
|
||||
public static final String EVENT_SERVICE_REGISTERED = "SERVICE_REGISTERED";
|
||||
public static final String EVENT_SERVICE_LISTED = "SERVICE_LISTED";
|
||||
public static final String EVENT_SERVICE_SEARCHED = "SERVICE_SEARCHED";
|
||||
public static final String EVENT_SANDBOX_VIEWED = "SANDBOX_VIEWED";
|
||||
|
||||
@Inject
|
||||
EntityManager em;
|
||||
|
||||
@Inject
|
||||
GeoService geoService;
|
||||
|
||||
@ConfigProperty(name = "apix.registry.base-url")
|
||||
String baseUrl;
|
||||
|
||||
@ConfigProperty(name = "apix.portal.base-url", defaultValue = "")
|
||||
String portalBaseUrl;
|
||||
|
||||
@Transactional
|
||||
public SandboxCreationResult create(String name, String contactEmail, String location) {
|
||||
String plainKey = generateKey("apix_sb_");
|
||||
String plainMaintenanceKey = generateKey("apix_mk_");
|
||||
|
||||
SandboxEntity sandbox = new SandboxEntity();
|
||||
sandbox.id = UUID.randomUUID();
|
||||
sandbox.name = name;
|
||||
sandbox.contactEmail = contactEmail;
|
||||
sandbox.apiKeyHash = hash(plainKey);
|
||||
sandbox.maintenanceKeyHash = hash(plainMaintenanceKey);
|
||||
sandbox.tier = "FREE";
|
||||
sandbox.ratePerMinute = ratePerMinute("FREE");
|
||||
sandbox.maxServices = maxServices("FREE");
|
||||
sandbox.maxOrgs = maxOrgs("FREE");
|
||||
sandbox.createdAt = Instant.now();
|
||||
sandbox.expiresAt = sandbox.createdAt.plus(lifetimeDays("FREE"), ChronoUnit.DAYS);
|
||||
|
||||
if (location != null && !location.isBlank()) {
|
||||
sandbox.registrarLocation = location.trim();
|
||||
geoService.geocodeLocation(location).ifPresent(coords -> {
|
||||
sandbox.registrarLat = coords[0];
|
||||
sandbox.registrarLon = coords[1];
|
||||
});
|
||||
}
|
||||
|
||||
em.persist(sandbox);
|
||||
return new SandboxCreationResult(sandbox, plainKey, plainMaintenanceKey);
|
||||
}
|
||||
|
||||
/** Records an agent visit geo-point. Raw IP is resolved and discarded; only coordinates are stored. */
|
||||
@Transactional
|
||||
public void recordAgentVisit(String sandboxId, String xForwardedFor) {
|
||||
geoService.geolocateIp(xForwardedFor).ifPresent(coords ->
|
||||
em.createNativeQuery(
|
||||
"INSERT INTO sandbox_agent_visits (id, sandbox_id, agent_lat, agent_lon, visited_at) " +
|
||||
"VALUES (gen_random_uuid(), :sid, :lat, :lon, now())")
|
||||
.setParameter("sid", sandboxId)
|
||||
.setParameter("lat", coords[0])
|
||||
.setParameter("lon", coords[1])
|
||||
.executeUpdate()
|
||||
);
|
||||
}
|
||||
|
||||
/** Returns the sandbox if the key is valid and not expired, null if key unknown. */
|
||||
public SandboxEntity findByKey(String plainKey) {
|
||||
if (plainKey == null || plainKey.isBlank()) return null;
|
||||
String keyHash = hash(plainKey);
|
||||
List<SandboxEntity> results = em.createQuery(
|
||||
"FROM SandboxEntity s WHERE s.apiKeyHash = :hash",
|
||||
SandboxEntity.class)
|
||||
.setParameter("hash", keyHash)
|
||||
.getResultList();
|
||||
if (results.isEmpty()) return null;
|
||||
SandboxEntity sandbox = results.get(0);
|
||||
if (Instant.now().isAfter(sandbox.expiresAt)) return null;
|
||||
return sandbox;
|
||||
}
|
||||
|
||||
public SandboxEntity requireById(UUID id) {
|
||||
SandboxEntity s = em.find(SandboxEntity.class, id);
|
||||
if (s == null) {
|
||||
throw new WebApplicationException(
|
||||
Response.status(404).entity(Map.of("message", "Sandbox not found")).build());
|
||||
}
|
||||
return s;
|
||||
}
|
||||
|
||||
/** Validates that the presented key belongs to this sandbox UUID. */
|
||||
public SandboxEntity requireAuth(UUID id, String plainKey) {
|
||||
SandboxEntity sandbox = requireById(id);
|
||||
if (plainKey == null || !sandbox.apiKeyHash.equals(hash(plainKey))) {
|
||||
throw new WebApplicationException(
|
||||
Response.status(401)
|
||||
.entity(Map.of("message", "Invalid or missing sandbox API key"))
|
||||
.build());
|
||||
}
|
||||
if (Instant.now().isAfter(sandbox.expiresAt)) {
|
||||
throw new WebApplicationException(
|
||||
Response.status(402)
|
||||
.entity(Map.of("message", "Sandbox has expired — upgrade your tier to continue"))
|
||||
.build());
|
||||
}
|
||||
return sandbox;
|
||||
}
|
||||
|
||||
/** Validates the maintenance key; does NOT check expiry — owner must be able to extend an expired sandbox. */
|
||||
public SandboxEntity requireMaintenanceAuth(UUID id, String plainMaintenanceKey) {
|
||||
SandboxEntity sandbox = requireById(id);
|
||||
if (plainMaintenanceKey == null || !sandbox.maintenanceKeyHash.equals(hash(plainMaintenanceKey))) {
|
||||
throw new WebApplicationException(
|
||||
Response.status(401)
|
||||
.entity(Map.of("message", "Invalid or missing maintenance key"))
|
||||
.build());
|
||||
}
|
||||
return sandbox;
|
||||
}
|
||||
|
||||
/** Extends sandbox expiry to now + tier lifetime. Returns the new expiresAt. */
|
||||
@Transactional
|
||||
public Instant extendExpiry(SandboxEntity sb) {
|
||||
sb.expiresAt = Instant.now().plus(lifetimeDays(sb.tier), ChronoUnit.DAYS);
|
||||
em.merge(sb);
|
||||
return sb.expiresAt;
|
||||
}
|
||||
|
||||
/** Upgrades a sandbox to the given tier, recalculating rate limits, caps, and expiry. */
|
||||
@Transactional
|
||||
public void promoteTier(SandboxEntity sb, String newTier) {
|
||||
sb.tier = newTier;
|
||||
sb.ratePerMinute = ratePerMinute(newTier);
|
||||
sb.maxServices = maxServices(newTier);
|
||||
sb.maxOrgs = maxOrgs(newTier);
|
||||
sb.expiresAt = Instant.now().plus(lifetimeDays(newTier), ChronoUnit.DAYS);
|
||||
em.merge(sb);
|
||||
}
|
||||
|
||||
/** Replaces the API key with a freshly generated one. Returns the new plaintext key (shown once). */
|
||||
@Transactional
|
||||
public String rotateApiKey(SandboxEntity sb) {
|
||||
String newKey = generateKey("apix_sb_");
|
||||
sb.apiKeyHash = hash(newKey);
|
||||
em.merge(sb);
|
||||
return newKey;
|
||||
}
|
||||
|
||||
@Transactional
|
||||
public ServiceEntity registerService(String sandboxId, BsmPayload payload) {
|
||||
SandboxEntity sandbox = em.find(SandboxEntity.class, UUID.fromString(sandboxId));
|
||||
if (sandbox == null) throw new WebApplicationException(
|
||||
Response.status(404).entity(Map.of("message", "Sandbox not found")).build());
|
||||
if (sandbox.maxServices != null) {
|
||||
long current = ((Number) em.createNativeQuery(
|
||||
"SELECT COUNT(*) FROM services WHERE sandbox_id = :sid AND registry_status = 'ACTIVE'")
|
||||
.setParameter("sid", sandboxId)
|
||||
.getSingleResult()).longValue();
|
||||
if (current >= sandbox.maxServices) {
|
||||
throw new WebApplicationException(Response.status(429)
|
||||
.entity(Map.of(
|
||||
"message", "Service limit reached for this sandbox tier",
|
||||
"limit", sandbox.maxServices,
|
||||
"tier", sandbox.tier))
|
||||
.build());
|
||||
}
|
||||
}
|
||||
Instant now = Instant.now();
|
||||
ServiceEntity service = new ServiceEntity();
|
||||
service.id = UUID.randomUUID();
|
||||
service.sandboxId = sandboxId;
|
||||
service.endpointUrl = payload.endpoint();
|
||||
service.bsmPayload = payload;
|
||||
service.olevel = OLevel.UNVERIFIED;
|
||||
service.livenessStatus = LivenessStatus.PENDING;
|
||||
service.registeredAt = now;
|
||||
service.registrantOrgType = payload.registrantOrgType() != null ? payload.registrantOrgType() : OrgType.INDIVIDUAL;
|
||||
service.serviceStage = payload.serviceStage() != null ? payload.serviceStage() : ServiceStage.DEVELOPMENT;
|
||||
service.registryStatus = RegistryStatus.ACTIVE;
|
||||
service.version = 1;
|
||||
|
||||
ServiceVersionEntity snapshot = new ServiceVersionEntity();
|
||||
snapshot.id = UUID.randomUUID();
|
||||
snapshot.serviceId = service.id;
|
||||
snapshot.version = 1;
|
||||
snapshot.recordedAt = now;
|
||||
snapshot.changeType = ChangeType.REGISTERED;
|
||||
snapshot.bsmPayload = payload;
|
||||
snapshot.registrantOrgType = service.registrantOrgType;
|
||||
snapshot.olevel = service.olevel;
|
||||
snapshot.serviceStage = service.serviceStage;
|
||||
snapshot.registryStatus = service.registryStatus;
|
||||
|
||||
em.persist(service);
|
||||
em.persist(snapshot);
|
||||
return service;
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
public List<ServiceResponse> listServices(String sandboxId) {
|
||||
return ((List<ServiceEntity>) em.createNativeQuery(
|
||||
"SELECT s.* FROM services s WHERE s.sandbox_id = :sid AND s.registry_status = 'ACTIVE'",
|
||||
ServiceEntity.class)
|
||||
.setParameter("sid", sandboxId)
|
||||
.getResultList())
|
||||
.stream()
|
||||
.map(ServiceResponse::from)
|
||||
.toList();
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
public List<ServiceResponse> searchServices(String sandboxId, String capability, String stage,
|
||||
List<String> properties) {
|
||||
ServiceStage targetStage = stage != null
|
||||
? ServiceStage.valueOf(stage.toUpperCase())
|
||||
: ServiceStage.DEVELOPMENT;
|
||||
|
||||
List<String[]> props = RegistryService.parsePropertyFilters(properties);
|
||||
StringBuilder sql = new StringBuilder(
|
||||
"SELECT s.* FROM services s " +
|
||||
"WHERE s.sandbox_id = :sid " +
|
||||
"AND s.registry_status = 'ACTIVE' " +
|
||||
"AND s.service_stage = :stage " +
|
||||
"AND s.bsm_payload @> jsonb_build_object('capabilities', jsonb_build_array(:cap))");
|
||||
for (int i = 0; i < props.size(); i++) {
|
||||
sql.append(" AND s.bsm_payload -> 'extensions' ->> :propKey").append(i)
|
||||
.append(" = :propValue").append(i);
|
||||
}
|
||||
|
||||
jakarta.persistence.Query q = em.createNativeQuery(sql.toString(), ServiceEntity.class)
|
||||
.setParameter("sid", sandboxId)
|
||||
.setParameter("stage", targetStage.name())
|
||||
.setParameter("cap", capability);
|
||||
for (int i = 0; i < props.size(); i++) {
|
||||
q.setParameter("propKey" + i, props.get(i)[0]);
|
||||
q.setParameter("propValue" + i, props.get(i)[1]);
|
||||
}
|
||||
return ((List<ServiceEntity>) q.getResultList()).stream()
|
||||
.map(ServiceResponse::from)
|
||||
.toList();
|
||||
}
|
||||
|
||||
// ── Telemetry ─────────────────────────────────────────────────────────────
|
||||
|
||||
@Transactional
|
||||
public void recordUsage(String sandboxId, String eventType) {
|
||||
em.createNativeQuery(
|
||||
"INSERT INTO sandbox_usage_stats (sandbox_id, event_type, request_count, last_requested_at) " +
|
||||
"VALUES (:sid, :type, 1, now()) " +
|
||||
"ON CONFLICT (sandbox_id, event_type) " +
|
||||
"DO UPDATE SET request_count = sandbox_usage_stats.request_count + 1, " +
|
||||
"last_requested_at = now()")
|
||||
.setParameter("sid", sandboxId)
|
||||
.setParameter("type", eventType)
|
||||
.executeUpdate();
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
public SandboxTelemetryResponse getTelemetry(SandboxEntity sandbox, String baseUrl) {
|
||||
List<Object[]> rows = em.createNativeQuery(
|
||||
"SELECT event_type, request_count, last_requested_at " +
|
||||
"FROM sandbox_usage_stats WHERE sandbox_id = :sid")
|
||||
.setParameter("sid", sandbox.id.toString())
|
||||
.getResultList();
|
||||
|
||||
Map<String, Long> usage = new LinkedHashMap<>();
|
||||
Instant lastActivityAt = null;
|
||||
for (Object[] row : rows) {
|
||||
usage.put((String) row[0], ((Number) row[1]).longValue());
|
||||
Instant ts = row[2] instanceof java.sql.Timestamp t ? t.toInstant() : null;
|
||||
if (ts != null && (lastActivityAt == null || ts.isAfter(lastActivityAt))) {
|
||||
lastActivityAt = ts;
|
||||
}
|
||||
}
|
||||
|
||||
String base = baseUrl + "/sandbox/" + sandbox.id;
|
||||
SandboxLinks links = sandboxLinks(base, sandbox.id.toString());
|
||||
|
||||
return new SandboxTelemetryResponse(
|
||||
sandbox.id.toString(), sandbox.name, sandbox.tier,
|
||||
sandbox.ratePerMinute, sandbox.expiresAt,
|
||||
sandbox.maxServices, sandbox.maxOrgs,
|
||||
usage, lastActivityAt, links);
|
||||
}
|
||||
|
||||
/**
 * Owner-facing dashboard for a sandbox: tier/limits, per-event usage counters,
 * last activity timestamp, registrar location, and the most recent agent-visit
 * coordinates (capped at 200) for the world map.
 *
 * @param id sandbox UUID; requireById resolves it (presumably raising an error
 *           for unknown ids — TODO confirm requireById's contract)
 * @return aggregated dashboard response
 */
@SuppressWarnings("unchecked")
public SandboxDashboardResponse getDashboard(UUID id) {
    SandboxEntity sb = requireById(id);

    // Per-event usage counters for this sandbox.
    List<Object[]> usageRows = em.createNativeQuery(
        "SELECT event_type, request_count, last_requested_at " +
        "FROM sandbox_usage_stats WHERE sandbox_id = :sid")
        .setParameter("sid", sb.id.toString())
        .getResultList();

    // Fold rows into a counter map (insertion-ordered) while tracking the
    // newest last_requested_at across all event types.
    Map<String, Long> usage = new LinkedHashMap<>();
    Instant lastActivityAt = null;
    for (Object[] row : usageRows) {
        usage.put((String) row[0], ((Number) row[1]).longValue());
        // last_requested_at may be NULL for a counter that was never touched.
        Instant ts = row[2] instanceof java.sql.Timestamp t ? t.toInstant() : null;
        if (ts != null && (lastActivityAt == null || ts.isAfter(lastActivityAt))) {
            lastActivityAt = ts;
        }
    }

    // Most recent agent visit points, newest first, capped at 200 for the map.
    List<Object[]> visitRows = em.createNativeQuery(
        "SELECT agent_lat, agent_lon, visited_at " +
        "FROM sandbox_agent_visits WHERE sandbox_id = :sid " +
        "ORDER BY visited_at DESC LIMIT 200")
        .setParameter("sid", sb.id.toString())
        .getResultList();

    // visited_at has a now() default in the schema; the Instant.now() fallback
    // here only triggers if the driver returns a non-Timestamp value.
    List<SandboxDashboardResponse.AgentVisit> visits = visitRows.stream()
        .map(row -> new SandboxDashboardResponse.AgentVisit(
            ((Number) row[0]).doubleValue(),
            ((Number) row[1]).doubleValue(),
            row[2] instanceof java.sql.Timestamp t ? t.toInstant() : Instant.now()))
        .toList();

    return new SandboxDashboardResponse(
        sb.id.toString(), sb.name, sb.tier, sb.ratePerMinute,
        sb.maxServices, sb.maxOrgs, sb.createdAt, sb.expiresAt,
        sb.registrarLocation, sb.registrarLat, sb.registrarLon,
        usage, lastActivityAt, visits);
}
|
||||
|
||||
// ── Feedback ──────────────────────────────────────────────────────────────
|
||||
|
||||
@Transactional
|
||||
public void submitFeedback(String sandboxId, FeedbackSubmissionRequest req) {
|
||||
Map<String, Integer> valid = new LinkedHashMap<>();
|
||||
for (var entry : req.scores().entrySet()) {
|
||||
if (KNOWN_DIMENSIONS.stream().anyMatch(d -> d.key().equals(entry.getKey()))) {
|
||||
int v = entry.getValue();
|
||||
if (v < 0 || v > 10) {
|
||||
throw new WebApplicationException(Response.status(422)
|
||||
.entity(Map.of("message",
|
||||
"Score for '" + entry.getKey() + "' must be 0–10"))
|
||||
.build());
|
||||
}
|
||||
valid.put(entry.getKey(), v);
|
||||
}
|
||||
}
|
||||
if (valid.isEmpty()) {
|
||||
throw new WebApplicationException(Response.status(422)
|
||||
.entity(Map.of("message", "No valid dimension keys submitted"))
|
||||
.build());
|
||||
}
|
||||
|
||||
try {
|
||||
String scoresJson = new com.fasterxml.jackson.databind.ObjectMapper()
|
||||
.writeValueAsString(valid);
|
||||
em.createNativeQuery(
|
||||
"INSERT INTO sandbox_feedback " +
|
||||
"(sandbox_id, scores, agent_identifier, comment, model_identifier, model_provider) " +
|
||||
"VALUES (:sid, :scores::jsonb, :agent, :comment, :modelId, :modelProvider)")
|
||||
.setParameter("sid", sandboxId)
|
||||
.setParameter("scores", scoresJson)
|
||||
.setParameter("agent", req.agentIdentifier())
|
||||
.setParameter("comment", req.comment())
|
||||
.setParameter("modelId", req.modelIdentifier())
|
||||
.setParameter("modelProvider", req.modelProvider())
|
||||
.executeUpdate();
|
||||
} catch (com.fasterxml.jackson.core.JsonProcessingException e) {
|
||||
throw new IllegalStateException("Failed to serialise feedback scores", e);
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Aggregates all feedback rows for a sandbox: total submission count, average
 * score and vote count per dimension key, and a submission count per model
 * provider.
 *
 * @param sandbox the sandbox whose feedback is aggregated
 * @return aggregate response including HATEOAS links for the sandbox
 */
@SuppressWarnings("unchecked")
public FeedbackAggregateResponse getAggregatedFeedback(SandboxEntity sandbox) {
    long total = ((Number) em.createNativeQuery(
        "SELECT COUNT(*) FROM sandbox_feedback WHERE sandbox_id = :sid")
        .setParameter("sid", sandbox.id.toString())
        .getSingleResult()).longValue();

    // Explode each submission's JSONB scores map into (key, value) pairs and
    // aggregate per key across all submissions (Postgres LATERAL jsonb_each).
    List<Object[]> rows = em.createNativeQuery(
        "SELECT kv.key, AVG((kv.value::text)::numeric), COUNT(*) " +
        "FROM sandbox_feedback f, " +
        "LATERAL jsonb_each(f.scores) AS kv(key, value) " +
        "WHERE f.sandbox_id = :sid " +
        "GROUP BY kv.key")
        .setParameter("sid", sandbox.id.toString())
        .getResultList();

    List<FeedbackAggregateResponse.DimensionScore> scores = rows.stream()
        .map(row -> {
            String key = (String) row[0];
            double avg = ((Number) row[1]).doubleValue();
            int votes = ((Number) row[2]).intValue();
            // Resolve the display question for the key; fall back to the raw
            // key for dimensions stored before a dimension was removed/renamed.
            String question = KNOWN_DIMENSIONS.stream()
                .filter(d -> d.key().equals(key))
                .map(FeedbackDimension::question)
                .findFirst()
                .orElse(key);
            return new FeedbackAggregateResponse.DimensionScore(key, question, avg, votes);
        })
        .toList();

    // Submission counts grouped by model provider; NULL providers are
    // surfaced under the literal label 'unknown'.
    List<Object[]> providerRows = em.createNativeQuery(
        "SELECT COALESCE(model_provider, 'unknown'), COUNT(*) " +
        "FROM sandbox_feedback WHERE sandbox_id = :sid " +
        "GROUP BY model_provider")
        .setParameter("sid", sandbox.id.toString())
        .getResultList();

    Map<String, Integer> byProvider = new LinkedHashMap<>();
    for (Object[] row : providerRows) {
        byProvider.put((String) row[0], ((Number) row[1]).intValue());
    }

    // baseUrl here is the instance-level configured registry base URL
    // (unlike getTelemetry, which takes it as a parameter).
    String base = baseUrl + "/sandbox/" + sandbox.id;
    return new FeedbackAggregateResponse(
        sandbox.id.toString(), sandbox.name, (int) total, scores, byProvider,
        sandboxLinks(base, sandbox.id.toString()));
}
|
||||
|
||||
public static FeedbackSchemaResponse feedbackSchema(String baseUrl) {
|
||||
var scale = new FeedbackSchemaResponse.Scale(0, 10, "poor / unusable", "excellent / effortless");
|
||||
var schemaLinks = new FeedbackSchemaResponse.FeedbackSchemaLinks(
|
||||
new SandboxLinks.LinkRef(baseUrl + "/sandbox/feedback-schema"));
|
||||
return new FeedbackSchemaResponse(
|
||||
"Agent experience dimensions for APIX sandbox usage. " +
|
||||
"Submit scores to POST /sandbox/{uuid}/feedback. " +
|
||||
"All dimensions are optional — submit only those relevant to your usage. " +
|
||||
"The extension_property_coverage dimension is especially important: if the standard BSM fields " +
|
||||
"were insufficient and you had to use ?property= queries or store data in extensions, " +
|
||||
"a low score here signals a gap that should be standardised.",
|
||||
scale,
|
||||
KNOWN_DIMENSIONS,
|
||||
schemaLinks);
|
||||
}
|
||||
|
||||
/** Convenience overload: sandbox links without the optional portal dashboard link. */
public SandboxLinks sandboxLinks(String base) {
    return sandboxLinks(base, null);
}
|
||||
|
||||
public SandboxLinks sandboxLinks(String base, String sandboxUuid) {
|
||||
SandboxLinks.LinkRef dashboard = (sandboxUuid != null && !portalBaseUrl.isBlank())
|
||||
? new SandboxLinks.LinkRef(portalBaseUrl + "/sandbox/" + sandboxUuid)
|
||||
: null;
|
||||
return new SandboxLinks(
|
||||
new LinkRef(base),
|
||||
new LinkRef(base + "/services"),
|
||||
new LinkRef(base + "/services{?capability,stage,property}", true),
|
||||
new LinkRef(base + "/feedback"),
|
||||
new LinkRef(baseUrl + "/sandbox/feedback-schema"),
|
||||
dashboard);
|
||||
}
|
||||
|
||||
// ── Statics ───────────────────────────────────────────────────────────────
|
||||
|
||||
// Canonical agent-experience feedback dimensions.
// Keys are stable API identifiers (stored verbatim in sandbox_feedback.scores
// JSONB — renaming one orphans historical data); each entry carries the display
// question plus the anchor labels for the low and high ends of the 0–10 scale
// used by feedbackSchema() and validated in submitFeedback().
public static final List<FeedbackDimension> KNOWN_DIMENSIONS = List.of(
    new FeedbackDimension(
        "hateoas_navigation",
        "Was HATEOAS navigation usable without prior documentation?",
        "completely lost", "navigated effortlessly"),
    new FeedbackDimension(
        "discovery_accuracy",
        "Did capability search return relevant services for your intent?",
        "irrelevant results", "exactly what I needed"),
    new FeedbackDimension(
        "trust_signal_clarity",
        "Were service trust levels (O-level) useful for your decision-making?",
        "confusing / irrelevant", "clear and decisive"),
    new FeedbackDimension(
        "schema_completeness",
        "Was the service information (BSM) complete enough to proceed without guessing?",
        "critical fields missing", "everything I needed was there"),
    new FeedbackDimension(
        "sandbox_setup",
        "How easy was sandbox creation and first service registration?",
        "blocked / confusing", "up and running immediately"),
    new FeedbackDimension(
        "rate_limit_adequacy",
        "Was the rate limit adequate for your testing workload?",
        "severely limiting", "no constraint felt"),
    new FeedbackDimension(
        "service_cap_adequacy",
        "Was the service registration limit adequate for your testing needs?",
        "hit the cap immediately", "never reached it"),
    new FeedbackDimension(
        "liveness_signal_accuracy",
        "Did liveness status reflect the service's actual reachability?",
        "liveness lied repeatedly", "always matched reality"),
    new FeedbackDimension(
        "error_message_quality",
        "Were error responses informative enough to correct your request without guessing?",
        "opaque / misleading", "clear and immediately actionable"),
    new FeedbackDimension(
        "extension_property_coverage",
        "Did the standard BSM fields cover what you needed, or did you rely on custom extension properties for information that should be standardised?",
        "had to use extensions for basic info", "standard fields covered everything")
);
|
||||
|
||||
public static int ratePerMinute(String tier) {
|
||||
return switch (tier) {
|
||||
case "STANDARD" -> 300;
|
||||
case "PROFESSIONAL" -> 1_000;
|
||||
case "COMMUNITY" -> 50_000; // stress-test tier
|
||||
case "FOUNDER" -> 5_000;
|
||||
case "DEMO" -> 10_000; // demo ecosystem
|
||||
default -> 60; // FREE
|
||||
};
|
||||
}
|
||||
|
||||
/** NULL = unlimited. */
|
||||
public static Integer maxServices(String tier) {
|
||||
return switch (tier) {
|
||||
case "STANDARD" -> 50;
|
||||
case "PROFESSIONAL" -> 200;
|
||||
case "COMMUNITY" -> null;
|
||||
case "FOUNDER" -> null;
|
||||
case "DEMO" -> null;
|
||||
default -> 10; // FREE
|
||||
};
|
||||
}
|
||||
|
||||
/** NULL = unlimited. */
|
||||
public static Integer maxOrgs(String tier) {
|
||||
return switch (tier) {
|
||||
case "STANDARD" -> 10;
|
||||
case "PROFESSIONAL" -> 50;
|
||||
case "COMMUNITY" -> null;
|
||||
case "FOUNDER" -> null;
|
||||
case "DEMO" -> null;
|
||||
default -> 3; // FREE
|
||||
};
|
||||
}
|
||||
|
||||
public static long lifetimeDays(String tier) {
|
||||
return switch (tier) {
|
||||
case "STANDARD" -> 180;
|
||||
case "PROFESSIONAL" -> 365;
|
||||
case "COMMUNITY" -> 90;
|
||||
case "FOUNDER" -> 36_500; // 100 years
|
||||
case "DEMO" -> 36_500; // permanent — never purged
|
||||
default -> 30; // FREE
|
||||
};
|
||||
}
|
||||
|
||||
private static String generateKey(String prefix) {
|
||||
byte[] bytes = new byte[32];
|
||||
new SecureRandom().nextBytes(bytes);
|
||||
return prefix + HexFormat.of().formatHex(bytes);
|
||||
}
|
||||
|
||||
private static String hash(String value) {
|
||||
try {
|
||||
MessageDigest digest = MessageDigest.getInstance("SHA-256");
|
||||
return HexFormat.of().formatHex(
|
||||
digest.digest(value.getBytes(StandardCharsets.UTF_8)));
|
||||
} catch (NoSuchAlgorithmException e) {
|
||||
throw new IllegalStateException("SHA-256 unavailable", e);
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Result of sandbox registration: the persisted entity plus the two plaintext
 * keys (service key and maintenance key). The plaintext values are returned to
 * the caller once; only their hashes are persisted.
 */
public record SandboxCreationResult(SandboxEntity sandbox, String plainKey, String plainMaintenanceKey) {}
}
|
||||
@@ -19,6 +19,12 @@ quarkus.liquibase.change-log=db/changelog/db.changelog-master.xml
|
||||
# ── HTTP ──────────────────────────────────────────────────────────────────────
|
||||
quarkus.http.port=8180
|
||||
|
||||
# ── Registry identity — used by IndexResource for HATEOAS links ───────────────
|
||||
apix.registry.base-url=${APIX_REGISTRY_BASE_URL:http://localhost:8180}
|
||||
apix.registry.name=${APIX_REGISTRY_NAME:APIX Registry}
|
||||
apix.portal.base-url=${APIX_PORTAL_BASE_URL:https://www.api-index.org}
|
||||
apix.registry.description=${APIX_REGISTRY_DESCRIPTION:The open autonomous agent service discovery registry. Follow _links.services to browse, or _links.servicesSearch to filter by capability.}
|
||||
|
||||
# ── Security — API key for write endpoints ───────────────────────────────────
|
||||
apix.api-key=${APIX_API_KEY:dev-insecure-key-change-in-prod}
|
||||
|
||||
@@ -41,6 +47,22 @@ apix.mail.signing.public-key-base64=${APIX_MAIL_SIGNING_PUBLIC_KEY:}
|
||||
apix.mail.signing.kid=${APIX_MAIL_SIGNING_KID:dev}
|
||||
apix.sanctions.cache-path=${SANCTIONS_CACHE_PATH:./sanctions-cache}
|
||||
|
||||
# ── Cache ─────────────────────────────────────────────────────────────────────
|
||||
# registry-index: caches GET / response. 60s TTL is acceptable — agents read the
|
||||
# root for navigation links which are static; counts are informational only.
|
||||
# CDN layer sits in front for edge caching. CDN choice is a governance decision:
|
||||
# no founding member candidate may operate infrastructure over the registry.
|
||||
# - Bunny.net (primary): European (Slovenia), 100+ PoPs, Africa + Asia-Pacific
|
||||
# coverage, privacy values align with Swiss Stiftung model. No AI/agent play.
|
||||
# - Fastly (secondary/fallback): independent US public company, no AI/agent play,
|
||||
# built for API/JSON caching, used by GitHub and npm, strong developer trust.
|
||||
# - DO NOT use Cloudflare (founding member target) or AWS CloudFront (AWS is a
|
||||
# founding member target): operational infrastructure = governance leverage,
|
||||
# regardless of what the founding charter says.
|
||||
quarkus.cache.caffeine.registry-index.expire-after-write=60S
|
||||
quarkus.cache.caffeine.registry-index.initial-capacity=1
|
||||
quarkus.cache.caffeine.registry-index.maximum-size=1
|
||||
|
||||
# ── Logging ───────────────────────────────────────────────────────────────────
|
||||
quarkus.log.level=${LOG_LEVEL:DEBUG}
|
||||
quarkus.log.console.json=false
|
||||
|
||||
@@ -0,0 +1,22 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<databaseChangeLog
|
||||
xmlns="http://www.liquibase.org/xml/ns/dbchangelog"
|
||||
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
|
||||
xsi:schemaLocation="http://www.liquibase.org/xml/ns/dbchangelog
|
||||
http://www.liquibase.org/xml/ns/dbchangelog/dbchangelog-4.27.xsd">
|
||||
|
||||
<changeSet id="012" author="apix" dbms="postgresql" runInTransaction="false">
|
||||
<!-- GIN index on the capabilities array inside the bsm_payload JSONB column.
|
||||
GET /services?capability=X uses a JSONB containment query (@>) which
|
||||
requires a GIN index to avoid a full table scan at any practical
|
||||
registry size. Without this index every capability search is O(n). -->
|
||||
<sql>
|
||||
CREATE INDEX CONCURRENTLY IF NOT EXISTS idx_services_capabilities
|
||||
ON services USING GIN ((bsm_payload -> 'capabilities'));
|
||||
</sql>
|
||||
<rollback>
|
||||
DROP INDEX CONCURRENTLY IF EXISTS idx_services_capabilities;
|
||||
</rollback>
|
||||
</changeSet>
|
||||
|
||||
</databaseChangeLog>
|
||||
@@ -0,0 +1,67 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<databaseChangeLog
|
||||
xmlns="http://www.liquibase.org/xml/ns/dbchangelog"
|
||||
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
|
||||
xsi:schemaLocation="http://www.liquibase.org/xml/ns/dbchangelog
|
||||
http://www.liquibase.org/xml/ns/dbchangelog/dbchangelog-4.27.xsd">
|
||||
|
||||
<changeSet id="013" author="apix">
|
||||
|
||||
<!-- Sandbox registry: isolated test namespaces, key-scoped, tier-limited -->
|
||||
<createTable tableName="sandboxes">
|
||||
<column name="id" type="uuid" defaultValueComputed="gen_random_uuid()">
|
||||
<constraints primaryKey="true" nullable="false"/>
|
||||
</column>
|
||||
<!-- URL-safe slug chosen by registrant, e.g. "openclaw" -->
|
||||
<column name="name" type="varchar(100)">
|
||||
<constraints nullable="false" unique="true" uniqueConstraintName="uq_sandbox_name"/>
|
||||
</column>
|
||||
<column name="contact_email" type="varchar(255)">
|
||||
<constraints nullable="false"/>
|
||||
</column>
|
||||
<!-- SHA-256 hex hash of the plaintext key — never stored in plaintext -->
|
||||
<column name="api_key_hash" type="varchar(64)">
|
||||
<constraints nullable="false" unique="true" uniqueConstraintName="uq_sandbox_api_key_hash"/>
|
||||
</column>
|
||||
<!-- FREE | STANDARD | PROFESSIONAL | FOUNDER -->
|
||||
<column name="tier" type="varchar(50)" defaultValue="FREE">
|
||||
<constraints nullable="false"/>
|
||||
</column>
|
||||
<!-- Cached from tier at creation time; surfaced in response so clients know their ceiling -->
|
||||
<column name="rate_per_minute" type="integer" defaultValueNumeric="60">
|
||||
<constraints nullable="false"/>
|
||||
</column>
|
||||
<column name="created_at" type="timestamptz" defaultValueComputed="now()">
|
||||
<constraints nullable="false"/>
|
||||
</column>
|
||||
<!-- FREE = created_at + 30 days; extended by tier upgrade -->
|
||||
<column name="expires_at" type="timestamptz">
|
||||
<constraints nullable="false"/>
|
||||
</column>
|
||||
</createTable>
|
||||
|
||||
<!-- Scope services to a sandbox: NULL = production, set = sandbox namespace -->
|
||||
<addColumn tableName="services">
|
||||
<column name="sandbox_id" type="varchar(100)"/>
|
||||
</addColumn>
|
||||
|
||||
<!-- Drop the global endpoint_url unique constraint — sandbox rows share URLs freely -->
|
||||
<dropUniqueConstraint tableName="services" constraintName="uq_services_endpoint_url"/>
|
||||
|
||||
<!-- Production rows: endpoint_url unique among production only -->
|
||||
<sql>
|
||||
CREATE UNIQUE INDEX uq_services_endpoint_production
|
||||
ON services(endpoint_url)
|
||||
WHERE sandbox_id IS NULL;
|
||||
</sql>
|
||||
|
||||
<!-- Index for sandbox queries: list all services in a given sandbox -->
|
||||
<sql>
|
||||
CREATE INDEX idx_services_sandbox_id
|
||||
ON services(sandbox_id)
|
||||
WHERE sandbox_id IS NOT NULL;
|
||||
</sql>
|
||||
|
||||
</changeSet>
|
||||
|
||||
</databaseChangeLog>
|
||||
@@ -0,0 +1,41 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<databaseChangeLog
|
||||
xmlns="http://www.liquibase.org/xml/ns/dbchangelog"
|
||||
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
|
||||
xsi:schemaLocation="http://www.liquibase.org/xml/ns/dbchangelog
|
||||
http://www.liquibase.org/xml/ns/dbchangelog/dbchangelog-4.27.xsd">
|
||||
|
||||
<changeSet id="014" author="apix">
|
||||
|
||||
<!-- Per-sandbox request counters — upserted on every tracked endpoint call.
|
||||
Intentionally separate from production Micrometer metrics: sandbox activity
|
||||
must never inflate production telemetry. -->
|
||||
<createTable tableName="sandbox_usage_stats">
|
||||
<column name="sandbox_id" type="varchar(100)">
|
||||
<constraints nullable="false"/>
|
||||
</column>
|
||||
<!-- SERVICE_REGISTERED | SERVICE_LISTED | SERVICE_SEARCHED | SANDBOX_VIEWED -->
|
||||
<column name="event_type" type="varchar(50)">
|
||||
<constraints nullable="false"/>
|
||||
</column>
|
||||
<column name="request_count" type="bigint" defaultValueNumeric="0">
|
||||
<constraints nullable="false"/>
|
||||
</column>
|
||||
<column name="last_requested_at" type="timestamptz"/>
|
||||
</createTable>
|
||||
|
||||
<addPrimaryKey tableName="sandbox_usage_stats"
|
||||
columnNames="sandbox_id, event_type"
|
||||
constraintName="pk_sandbox_usage_stats"/>
|
||||
|
||||
<addForeignKeyConstraint
|
||||
baseTableName="sandbox_usage_stats"
|
||||
baseColumnNames="sandbox_id"
|
||||
referencedTableName="sandboxes"
|
||||
referencedColumnNames="name"
|
||||
constraintName="fk_sandbox_usage_sandbox_name"
|
||||
onDelete="CASCADE"/>
|
||||
|
||||
</changeSet>
|
||||
|
||||
</databaseChangeLog>
|
||||
@@ -0,0 +1,28 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<databaseChangeLog
|
||||
xmlns="http://www.liquibase.org/xml/ns/dbchangelog"
|
||||
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
|
||||
xsi:schemaLocation="http://www.liquibase.org/xml/ns/dbchangelog
|
||||
http://www.liquibase.org/xml/ns/dbchangelog/dbchangelog-4.27.xsd">
|
||||
|
||||
<changeSet id="015" author="apix">
|
||||
|
||||
<!-- Registration caps per sandbox. NULL = unlimited (COMMUNITY / FOUNDER tiers).
|
||||
Stored explicitly so BSF can override per partner without changing tier. -->
|
||||
<addColumn tableName="sandboxes">
|
||||
<column name="max_services" type="integer">
|
||||
<constraints nullable="true"/>
|
||||
</column>
|
||||
</addColumn>
|
||||
<addColumn tableName="sandboxes">
|
||||
<column name="max_orgs" type="integer">
|
||||
<constraints nullable="true"/>
|
||||
</column>
|
||||
</addColumn>
|
||||
|
||||
<!-- Back-fill existing FREE sandboxes -->
|
||||
<sql>UPDATE sandboxes SET max_services = 10, max_orgs = 3 WHERE tier = 'FREE';</sql>
|
||||
|
||||
</changeSet>
|
||||
|
||||
</databaseChangeLog>
|
||||
@@ -0,0 +1,40 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<databaseChangeLog
|
||||
xmlns="http://www.liquibase.org/xml/ns/dbchangelog"
|
||||
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
|
||||
xsi:schemaLocation="http://www.liquibase.org/xml/ns/dbchangelog
|
||||
http://www.liquibase.org/xml/ns/dbchangelog/dbchangelog-4.27.xsd">
|
||||
|
||||
<changeSet id="016" author="apix">
|
||||
|
||||
<!-- Agent experience feedback — scores stored as JSONB so new dimensions
|
||||
require no schema migration. One row per submission. -->
|
||||
<createTable tableName="sandbox_feedback">
|
||||
<column name="id" type="uuid" defaultValueComputed="gen_random_uuid()">
|
||||
<constraints primaryKey="true" nullable="false"/>
|
||||
</column>
|
||||
<column name="sandbox_id" type="varchar(100)">
|
||||
<constraints nullable="false"
|
||||
foreignKeyName="fk_sandbox_feedback_sandbox"
|
||||
references="sandboxes(name)"
|
||||
deleteCascade="true"/>
|
||||
</column>
|
||||
<!-- { "hateoas_navigation": 8, "discovery_accuracy": 7, ... } -->
|
||||
<column name="scores" type="jsonb">
|
||||
<constraints nullable="false"/>
|
||||
</column>
|
||||
<!-- Optional: agent may self-identify (tool name, version string, etc.) -->
|
||||
<column name="agent_identifier" type="varchar(255)"/>
|
||||
<column name="comment" type="varchar(500)"/>
|
||||
<column name="submitted_at" type="timestamptz" defaultValueComputed="now()">
|
||||
<constraints nullable="false"/>
|
||||
</column>
|
||||
</createTable>
|
||||
|
||||
<createIndex tableName="sandbox_feedback" indexName="idx_sandbox_feedback_sandbox_id">
|
||||
<column name="sandbox_id"/>
|
||||
</createIndex>
|
||||
|
||||
</changeSet>
|
||||
|
||||
</databaseChangeLog>
|
||||
@@ -0,0 +1,28 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<databaseChangeLog
|
||||
xmlns="http://www.liquibase.org/xml/ns/dbchangelog"
|
||||
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
|
||||
xsi:schemaLocation="http://www.liquibase.org/xml/ns/dbchangelog
|
||||
http://www.liquibase.org/xml/ns/dbchangelog/dbchangelog-4.27.xsd">
|
||||
|
||||
<changeSet id="017" author="apix">
|
||||
|
||||
<!-- Model identity on feedback submissions — enables cross-model score analysis.
|
||||
Both nullable: agents may not know or may choose not to disclose. -->
|
||||
<addColumn tableName="sandbox_feedback">
|
||||
<!-- Full model identifier as the agent knows it: "claude-sonnet-4-6", "gpt-4o-2024-11-20" -->
|
||||
<column name="model_identifier" type="varchar(255)"/>
|
||||
</addColumn>
|
||||
<addColumn tableName="sandbox_feedback">
|
||||
<!-- Provider family: "anthropic", "openai", "google", "meta", "mistral" … -->
|
||||
<column name="model_provider" type="varchar(100)"/>
|
||||
</addColumn>
|
||||
|
||||
<!-- Index for provider-grouped aggregate queries -->
|
||||
<createIndex tableName="sandbox_feedback" indexName="idx_sandbox_feedback_model_provider">
|
||||
<column name="model_provider"/>
|
||||
</createIndex>
|
||||
|
||||
</changeSet>
|
||||
|
||||
</databaseChangeLog>
|
||||
@@ -0,0 +1,26 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<databaseChangeLog
|
||||
xmlns="http://www.liquibase.org/xml/ns/dbchangelog"
|
||||
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
|
||||
xsi:schemaLocation="http://www.liquibase.org/xml/ns/dbchangelog
|
||||
http://www.liquibase.org/xml/ns/dbchangelog/dbchangelog-4.27.xsd">
|
||||
|
||||
<changeSet id="018" author="apix">
|
||||
|
||||
<!-- Optional registrar location: owner-declared free-text (e.g. "Berlin, Germany").
|
||||
Raw IP is never stored — resolved to coordinates once at registration time.
|
||||
geo_consent_given records that the owner explicitly chose to share their location;
|
||||
required for GDPR Art. 7 accountability (demonstrating consent was given). -->
|
||||
<addColumn tableName="sandboxes">
|
||||
<column name="registrar_location" type="varchar(255)"/>
|
||||
</addColumn>
|
||||
<addColumn tableName="sandboxes">
|
||||
<column name="registrar_lat" type="double precision"/>
|
||||
</addColumn>
|
||||
<addColumn tableName="sandboxes">
|
||||
<column name="registrar_lon" type="double precision"/>
|
||||
</addColumn>
|
||||
|
||||
</changeSet>
|
||||
|
||||
</databaseChangeLog>
|
||||
@@ -0,0 +1,47 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<databaseChangeLog
|
||||
xmlns="http://www.liquibase.org/xml/ns/dbchangelog"
|
||||
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
|
||||
xsi:schemaLocation="http://www.liquibase.org/xml/ns/dbchangelog
|
||||
http://www.liquibase.org/xml/ns/dbchangelog/dbchangelog-4.27.xsd">
|
||||
|
||||
<changeSet id="019" author="apix">
|
||||
|
||||
<!-- Agent visit geo-points for the sandbox world map.
|
||||
Raw IP is NEVER stored — resolved to lat/lon at request time and discarded.
|
||||
Storing only city-level coordinates is GDPR-compliant under legitimate interest
|
||||
(Art. 6(1)(f)): aggregate, non-identifying usage analytics for the sandbox owner. -->
|
||||
<createTable tableName="sandbox_agent_visits">
|
||||
<column name="id" type="uuid" defaultValueComputed="gen_random_uuid()">
|
||||
<constraints primaryKey="true" nullable="false"/>
|
||||
</column>
|
||||
<column name="sandbox_id" type="varchar(100)">
|
||||
<constraints nullable="false"/>
|
||||
</column>
|
||||
<column name="agent_lat" type="double precision">
|
||||
<constraints nullable="false"/>
|
||||
</column>
|
||||
<column name="agent_lon" type="double precision">
|
||||
<constraints nullable="false"/>
|
||||
</column>
|
||||
<column name="visited_at" type="timestamptz" defaultValueComputed="now()">
|
||||
<constraints nullable="false"/>
|
||||
</column>
|
||||
</createTable>
|
||||
|
||||
<addForeignKeyConstraint
|
||||
baseTableName="sandbox_agent_visits"
|
||||
baseColumnNames="sandbox_id"
|
||||
referencedTableName="sandboxes"
|
||||
referencedColumnNames="name"
|
||||
constraintName="fk_agent_visits_sandbox_name"
|
||||
onDelete="CASCADE"/>
|
||||
|
||||
<createIndex tableName="sandbox_agent_visits" indexName="idx_agent_visits_sandbox_time">
|
||||
<column name="sandbox_id"/>
|
||||
<column name="visited_at" descending="true"/>
|
||||
</createIndex>
|
||||
|
||||
</changeSet>
|
||||
|
||||
</databaseChangeLog>
|
||||
@@ -0,0 +1,30 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<databaseChangeLog
|
||||
xmlns="http://www.liquibase.org/xml/ns/dbchangelog"
|
||||
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
|
||||
xsi:schemaLocation="http://www.liquibase.org/xml/ns/dbchangelog
|
||||
http://www.liquibase.org/xml/ns/dbchangelog/dbchangelog-4.27.xsd">
|
||||
|
||||
<changeSet id="020" author="apix">
|
||||
|
||||
<!-- Sandbox name is now a display label only — not a resource identifier.
|
||||
UUID is the sole lookup key. All FKs and the unique constraint that tied
|
||||
sandbox_id = name are dropped; the varchar(100) columns remain and now
|
||||
store UUID strings. Existing sandbox data is purged because old name-based
|
||||
API keys and routes are incompatible with the new UUID-only routes. -->
|
||||
|
||||
<sql>TRUNCATE TABLE sandbox_agent_visits, sandbox_usage_stats, sandbox_feedback, sandboxes CASCADE;</sql>
|
||||
|
||||
<!-- Drop FKs before unique constraint — they reference the index backing uq_sandbox_name -->
|
||||
<dropForeignKeyConstraint baseTableName="sandbox_usage_stats" constraintName="fk_sandbox_usage_sandbox_name"/>
|
||||
<dropForeignKeyConstraint baseTableName="sandbox_agent_visits" constraintName="fk_agent_visits_sandbox_name"/>
|
||||
<dropForeignKeyConstraint baseTableName="sandbox_feedback" constraintName="fk_sandbox_feedback_sandbox"/>
|
||||
|
||||
<!-- Drop name uniqueness — name is a label, may repeat -->
|
||||
<dropUniqueConstraint tableName="sandboxes" constraintName="uq_sandbox_name"/>
|
||||
|
||||
<!-- services.sandbox_id had no FK to sandboxes — nothing to drop there. -->
|
||||
|
||||
</changeSet>
|
||||
|
||||
</databaseChangeLog>
|
||||
@@ -0,0 +1,28 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<databaseChangeLog
|
||||
xmlns="http://www.liquibase.org/xml/ns/dbchangelog"
|
||||
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
|
||||
xsi:schemaLocation="http://www.liquibase.org/xml/ns/dbchangelog
|
||||
http://www.liquibase.org/xml/ns/dbchangelog/dbchangelog-4.27.xsd">
|
||||
|
||||
<changeSet id="021" author="apix">
|
||||
|
||||
<!-- Second key returned at registration, shown once only.
|
||||
Used exclusively for administrative operations:
|
||||
- PATCH /sandbox/{uuid}/extend (renew expiry)
|
||||
- PATCH /sandbox/{uuid}/api-key (rotate service key)
|
||||
Separation of concerns: agents embed the apiKey; the owner keeps the
|
||||
maintenanceKey for lifecycle control without exposing it to agents. -->
|
||||
<addColumn tableName="sandboxes">
|
||||
<column name="maintenance_key_hash" type="varchar(64)" defaultValue="PENDING">
|
||||
<constraints nullable="false"/>
|
||||
</column>
|
||||
</addColumn>
|
||||
|
||||
        <!-- The "PENDING" default exists only so the NOT NULL column can be
             added. Changeset 020 purged all sandboxes, so this table is empty
             here and no real row ever stores the literal "PENDING".
             NOTE(review): the column default is never dropped — consider a
             dropDefaultValue change so an application bug cannot silently
             persist the placeholder in future inserts. -->
|
||||
|
||||
</changeSet>
|
||||
|
||||
</databaseChangeLog>
|
||||
@@ -0,0 +1,17 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<databaseChangeLog
|
||||
xmlns="http://www.liquibase.org/xml/ns/dbchangelog"
|
||||
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
|
||||
xsi:schemaLocation="http://www.liquibase.org/xml/ns/dbchangelog
|
||||
http://www.liquibase.org/xml/ns/dbchangelog/dbchangelog-4.27.xsd">
|
||||
|
||||
<changeSet id="022" author="apix">
|
||||
|
||||
<!-- Open-beta / training mode: registrants who are bots or classrooms
|
||||
do not always have a meaningful contact email. Email is still
|
||||
accepted and stored when provided; omitting it is valid. -->
|
||||
<dropNotNullConstraint tableName="sandboxes" columnName="contact_email"/>
|
||||
|
||||
</changeSet>
|
||||
|
||||
</databaseChangeLog>
|
||||
@@ -16,5 +16,16 @@
|
||||
<include file="changes/009-iot-profiles.xml" relativeToChangelogFile="true"/>
|
||||
<include file="changes/010-organizations.xml" relativeToChangelogFile="true"/>
|
||||
<include file="changes/011-org-rotation-challenge.xml" relativeToChangelogFile="true"/>
|
||||
<include file="changes/012-capabilities-gin-index.xml" relativeToChangelogFile="true"/>
|
||||
<include file="changes/013-sandbox.xml" relativeToChangelogFile="true"/>
|
||||
<include file="changes/014-sandbox-usage.xml" relativeToChangelogFile="true"/>
|
||||
<include file="changes/015-sandbox-caps.xml" relativeToChangelogFile="true"/>
|
||||
<include file="changes/016-sandbox-feedback.xml" relativeToChangelogFile="true"/>
|
||||
<include file="changes/017-feedback-model-info.xml" relativeToChangelogFile="true"/>
|
||||
<include file="changes/018-sandbox-geo.xml" relativeToChangelogFile="true"/>
|
||||
<include file="changes/019-sandbox-agent-visits.xml" relativeToChangelogFile="true"/>
|
||||
<include file="changes/020-sandbox-uuid-routing.xml" relativeToChangelogFile="true"/>
|
||||
<include file="changes/021-sandbox-maintenance-key.xml" relativeToChangelogFile="true"/>
|
||||
<include file="changes/022-optional-email.xml" relativeToChangelogFile="true"/>
|
||||
|
||||
</databaseChangeLog>
|
||||
|
||||
+63
@@ -0,0 +1,63 @@
|
||||
package org.botstandards.apix.registry.service;
|
||||
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.junit.jupiter.params.ParameterizedTest;
|
||||
import org.junit.jupiter.params.provider.CsvSource;
|
||||
import org.junit.jupiter.params.provider.ValueSource;
|
||||
|
||||
import static org.assertj.core.api.Assertions.assertThat;
|
||||
|
||||
class GeoServiceTest {
|
||||
|
||||
// ── extractClientIp ──────────────────────────────────────────────────────
|
||||
|
||||
@Test
|
||||
void extractClientIp_returnsFirstIp_whenMultiplePresent() {
|
||||
assertThat(GeoService.extractClientIp("203.0.113.42, 10.0.0.1, 172.16.0.1"))
|
||||
.isEqualTo("203.0.113.42");
|
||||
}
|
||||
|
||||
@Test
|
||||
void extractClientIp_trimsWhitespace() {
|
||||
assertThat(GeoService.extractClientIp(" 198.51.100.7 , 10.0.0.2"))
|
||||
.isEqualTo("198.51.100.7");
|
||||
}
|
||||
|
||||
@Test
|
||||
void extractClientIp_returnsNull_forNullInput() {
|
||||
assertThat(GeoService.extractClientIp(null)).isNull();
|
||||
}
|
||||
|
||||
@Test
|
||||
void extractClientIp_returnsNull_forBlankInput() {
|
||||
assertThat(GeoService.extractClientIp(" ")).isNull();
|
||||
}
|
||||
|
||||
@Test
|
||||
void extractClientIp_handlesStandaloneIp() {
|
||||
assertThat(GeoService.extractClientIp("203.0.113.1")).isEqualTo("203.0.113.1");
|
||||
}
|
||||
|
||||
// ── isPrivateOrLoopback ──────────────────────────────────────────────────
|
||||
|
||||
@ParameterizedTest
|
||||
@ValueSource(strings = { "127.0.0.1", "::1", "localhost", "10.0.0.1", "10.255.255.255",
|
||||
"192.168.0.1", "192.168.255.254", "172.16.0.1", "172.31.255.255" })
|
||||
void isPrivateOrLoopback_returnsTrue_forPrivateAddresses(String ip) {
|
||||
assertThat(GeoService.isPrivateOrLoopback(ip)).isTrue();
|
||||
}
|
||||
|
||||
@ParameterizedTest
|
||||
@ValueSource(strings = { "8.8.8.8", "203.0.113.1", "198.51.100.42",
|
||||
"172.15.255.255", "172.32.0.1", "1.1.1.1" })
|
||||
void isPrivateOrLoopback_returnsFalse_forPublicAddresses(String ip) {
|
||||
assertThat(GeoService.isPrivateOrLoopback(ip)).isFalse();
|
||||
}
|
||||
|
||||
@ParameterizedTest
|
||||
@CsvSource({ "172.16.0.1,true", "172.20.5.5,true", "172.31.0.0,true",
|
||||
"172.15.0.1,false", "172.32.0.1,false" })
|
||||
void isPrivateOrLoopback_handles172Range(String ip, boolean expected) {
|
||||
assertThat(GeoService.isPrivateOrLoopback(ip)).isEqualTo(expected);
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,99 @@
|
||||
package org.botstandards.apix.spider;
|
||||
|
||||
import io.quarkus.logging.Log;
|
||||
import io.quarkus.scheduler.Scheduled;
|
||||
import jakarta.enterprise.context.ApplicationScoped;
|
||||
import jakarta.inject.Inject;
|
||||
|
||||
import javax.sql.DataSource;
|
||||
import java.sql.Connection;
|
||||
import java.sql.SQLException;
|
||||
|
||||
/**
|
||||
* Purges expired sandboxes and all their associated data.
|
||||
* Single concern: cleanup. No liveness probing, no reporting.
|
||||
*
|
||||
* Deletion order respects the service_versions → services FK.
|
||||
* The sandbox child tables (feedback, usage_stats, agent_visits) have no
|
||||
* active FK constraints after changeset 020, but are still cleaned up
|
||||
* explicitly to avoid orphaned rows.
|
||||
*/
|
||||
@ApplicationScoped
|
||||
public class SandboxCleanupJob {
|
||||
|
||||
@Inject
|
||||
DataSource dataSource;
|
||||
|
||||
@Scheduled(cron = "${apix.spider.cleanup-cron:0 0 * * * ?}")
|
||||
void purgeExpiredSandboxes() {
|
||||
try (Connection conn = dataSource.getConnection()) {
|
||||
conn.setAutoCommit(false);
|
||||
try {
|
||||
// DEMO tier sandboxes never expire — they are excluded from all deletes
|
||||
long count = countExpired(conn);
|
||||
if (count == 0) {
|
||||
conn.rollback();
|
||||
return;
|
||||
}
|
||||
|
||||
// Only FREE sandboxes auto-expire; STANDARD+ are paid and stay until explicit cancellation
|
||||
// service_versions references services.id — delete versions first
|
||||
exec(conn,
|
||||
"DELETE FROM service_versions sv " +
|
||||
"USING services s, sandboxes sb " +
|
||||
"WHERE sv.service_id = s.id " +
|
||||
" AND s.sandbox_id = sb.id::text " +
|
||||
" AND sb.expires_at < now() AND sb.tier = 'FREE'");
|
||||
|
||||
exec(conn,
|
||||
"DELETE FROM services s " +
|
||||
"USING sandboxes sb " +
|
||||
"WHERE s.sandbox_id = sb.id::text " +
|
||||
" AND sb.expires_at < now() AND sb.tier = 'FREE'");
|
||||
|
||||
exec(conn,
|
||||
"DELETE FROM sandbox_feedback sf " +
|
||||
"USING sandboxes sb " +
|
||||
"WHERE sf.sandbox_id = sb.id::text " +
|
||||
" AND sb.expires_at < now() AND sb.tier = 'FREE'");
|
||||
|
||||
exec(conn,
|
||||
"DELETE FROM sandbox_usage_stats su " +
|
||||
"USING sandboxes sb " +
|
||||
"WHERE su.sandbox_id = sb.id::text " +
|
||||
" AND sb.expires_at < now() AND sb.tier = 'FREE'");
|
||||
|
||||
exec(conn,
|
||||
"DELETE FROM sandbox_agent_visits av " +
|
||||
"USING sandboxes sb " +
|
||||
"WHERE av.sandbox_id = sb.id::text " +
|
||||
" AND sb.expires_at < now() AND sb.tier = 'FREE'");
|
||||
|
||||
exec(conn, "DELETE FROM sandboxes WHERE expires_at < now() AND tier = 'FREE'");
|
||||
|
||||
conn.commit();
|
||||
Log.infof("Purged %d expired sandbox(es)", count);
|
||||
} catch (SQLException e) {
|
||||
conn.rollback();
|
||||
Log.errorf(e, "Sandbox cleanup failed — transaction rolled back");
|
||||
}
|
||||
} catch (SQLException e) {
|
||||
Log.errorf(e, "Sandbox cleanup failed — could not obtain connection");
|
||||
}
|
||||
}
|
||||
|
||||
private static long countExpired(Connection conn) throws SQLException {
|
||||
try (var stmt = conn.prepareStatement(
|
||||
"SELECT COUNT(*) FROM sandboxes WHERE expires_at < now() AND tier = 'FREE'");
|
||||
var rs = stmt.executeQuery()) {
|
||||
rs.next();
|
||||
return rs.getLong(1);
|
||||
}
|
||||
}
|
||||
|
||||
private static void exec(Connection conn, String sql) throws SQLException {
|
||||
try (var stmt = conn.prepareStatement(sql)) {
|
||||
stmt.executeUpdate();
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,10 @@
|
||||
quarkus.http.port=8082
|
||||
quarkus.smallrye-health.root-path=/q/health
|
||||
quarkus.log.level=${LOG_LEVEL:INFO}
|
||||
|
||||
# DB — spider connects to the same database; does NOT run Liquibase (registry owns schema)
|
||||
quarkus.datasource.db-kind=postgresql
|
||||
quarkus.datasource.jdbc.url=${DB_URL:jdbc:postgresql://localhost:5432/apix}
|
||||
quarkus.datasource.username=${DB_USER:apix}
|
||||
quarkus.datasource.password=${DB_PASSWORD:apix}
|
||||
quarkus.hibernate-orm.database.generation=none
|
||||
@@ -0,0 +1,278 @@
|
||||
# DNS Migration: IONOS → BunnyDNS
|
||||
|
||||
Safe, zero-downtime migration of `api-index.org` from IONOS to BunnyDNS,
|
||||
enabling apex CNAME support for the Bunny.net CDN.
|
||||
|
||||
**Risk profile:** Low if the checklist is followed in order. The domain stays
|
||||
fully operational at every step. IONOS remains the authoritative fallback
|
||||
until you explicitly confirm the migration is complete.
|
||||
|
||||
---
|
||||
|
||||
## Overview
|
||||
|
||||
```
|
||||
Phase 0 — Audit Inventory every record in IONOS. Nothing changes.
|
||||
Phase 1 — Reduce TTL Lower TTL to 300 s. Wait for old TTL to drain.
|
||||
Phase 2 — Mirror Replicate all records into BunnyDNS. Verify.
|
||||
Phase 3 — Nameservers Change NS at IONOS registrar → Bunny.net servers.
|
||||
Phase 4 — CDN switch Replace A record with CNAME to Bunny.net CDN edge.
|
||||
Phase 5 — Cleanup Confirm everything. IONOS zone stays intact as archive.
|
||||
```
|
||||
|
||||
**Total calendar time:** 2–3 days minimum (TTL drain + propagation windows).
|
||||
You can compress to ~24 h if IONOS allows TTL = 60 s and you monitor closely.
|
||||
|
||||
---
|
||||
|
||||
## Phase 0 — Audit Existing Records
|
||||
|
||||
**Do this before touching anything.**
|
||||
|
||||
Log in to IONOS → Domains & SSL → `api-index.org` → DNS.
|
||||
|
||||
Export or manually list every record. With no email on the domain the expected
|
||||
records are minimal:
|
||||
|
||||
| Type | Name | Value | Notes |
|
||||
|------|------|-------|-------|
|
||||
| A | `@` | VPS IP or IONOS parking IP | Migrate this |
|
||||
| A / CNAME | `www` | same IP or alias | Migrate this |
|
||||
| CNAME | `_domainconnect` | IONOS internal | **Do NOT migrate — IONOS-specific** |
|
||||
|
||||
No MX, SPF, DKIM, or DMARC records exist (no email = no mail records).
|
||||
This makes the migration low-risk: there is nothing here that can cause an
|
||||
hours-long silent failure. The worst case is the website is briefly unreachable,
|
||||
which resolves as soon as you revert the nameservers.
|
||||
|
||||
**Identify the current TTL** for each record. IONOS default is often 3600 s (1 h).
|
||||
That is the minimum wait time after reducing TTL before the change is globally drained.
|
||||
|
||||
---
|
||||
|
||||
## Phase 1 — Reduce TTL
|
||||
|
||||
Change every record's TTL in IONOS to **300 seconds** (5 minutes).
|
||||
|
||||
IONOS UI path: DNS record → Edit → TTL field.
|
||||
If IONOS does not allow TTL below 3600 on your plan, use 3600 — it just means
|
||||
a longer wait in Phase 3.
|
||||
|
||||
**After saving the reduced TTLs, wait for the old TTL to fully drain.**
|
||||
If the old TTL was 3600 s (1 h), wait at least 1 hour before proceeding.
|
||||
This ensures no resolver is caching the old TTL value.
|
||||
|
||||
Verify the reduced TTL is live from an external resolver:
|
||||
```bash
|
||||
# Should show TTL=300 (or close to it) in the answer section
|
||||
dig api-index.org A +noall +answer
|
||||
dig www.api-index.org A +noall +answer
|
||||
```
|
||||
|
||||
Do not proceed to Phase 2 until the TTL shown in `dig` output matches your
|
||||
reduced value.
|
||||
|
||||
---
|
||||
|
||||
## Phase 2 — Mirror Records into BunnyDNS
|
||||
|
||||
### 2a. Create the BunnyDNS zone
|
||||
|
||||
1. Log in to Bunny.net → DNS → Add Zone
|
||||
2. Enter `api-index.org` and confirm
|
||||
3. Bunny.net assigns two nameservers (e.g. `kiki.bunny.net`, `coco.bunny.net`) —
|
||||
note these for Phase 3
|
||||
|
||||
### 2b. Add every record from Phase 0
|
||||
|
||||
Replicate the full record list into BunnyDNS exactly — same name, same value,
|
||||
same type. Use TTL = 300 for all records during migration.
|
||||
|
||||
**Do not add:**
|
||||
- `_domainconnect` — IONOS-internal, not needed
|
||||
- IONOS parking/redirect entries — not needed
|
||||
|
||||
**Do add:**
|
||||
- A record `@` → VPS IP
|
||||
- A record (or CNAME) `www` → VPS IP (or `api-index.org`)
|
||||
|
||||
For the apex A record (`@`), add it pointing to the **Hetzner VPS IP** for now.
|
||||
You will convert it to the CDN CNAME in Phase 4 — not before.
|
||||
|
||||
### 2c. Verify the BunnyDNS zone before touching IONOS
|
||||
|
||||
Use `dig` with BunnyDNS as the explicit resolver to query the zone before
|
||||
nameserver delegation. Replace `kiki.bunny.net` with your assigned nameserver:
|
||||
|
||||
```bash
|
||||
# A record — should return VPS IP
|
||||
dig @kiki.bunny.net api-index.org A +short
|
||||
|
||||
# www
|
||||
dig @kiki.bunny.net www.api-index.org A +short
|
||||
```
|
||||
|
||||
Compare every answer against your Phase 0 audit.
|
||||
**Do not proceed to Phase 3 until all records match.**
|
||||
|
||||
---
|
||||
|
||||
## Phase 3 — Switch Nameservers at IONOS
|
||||
|
||||
This is the single step that transfers authority. Once saved, resolvers will
|
||||
gradually start querying BunnyDNS instead of IONOS. IONOS DNS zone remains
|
||||
intact — you are only changing where resolvers are pointed, not deleting anything.
|
||||
|
||||
### 3a. Change nameservers in IONOS
|
||||
|
||||
IONOS UI path: Domains & SSL → `api-index.org` → Nameservers → Use custom nameservers
|
||||
|
||||
Enter the two nameservers from BunnyDNS (e.g.):
|
||||
```
|
||||
kiki.bunny.net
|
||||
coco.bunny.net
|
||||
```
|
||||
|
||||
Save. IONOS will show a warning that custom nameservers override IONOS DNS — confirm.
|
||||
|
||||
### 3b. Wait for propagation
|
||||
|
||||
Propagation is complete when global resolvers return the BunnyDNS IP for your domain.
|
||||
This typically takes 15–60 minutes with TTL=300. It can take up to 48 hours in rare
|
||||
cases (resolvers ignoring low TTLs). Monitor:
|
||||
|
||||
```bash
|
||||
# Repeat every few minutes — watch for the IONOS nameservers to disappear
|
||||
dig api-index.org NS +short
|
||||
|
||||
# Check from multiple global vantage points
|
||||
# https://dnschecker.org/#A/api-index.org — paste in browser, check all green
|
||||
```
|
||||
|
||||
**During propagation:** Some resolvers still use IONOS, some use BunnyDNS.
|
||||
Both zones have identical records pointing to the VPS IP — so the site stays up
|
||||
regardless of which nameserver a resolver hits.
|
||||
|
||||
### 3c. Verify after propagation
|
||||
|
||||
```bash
|
||||
# NS records should now show Bunny.net nameservers from all resolvers
|
||||
dig api-index.org NS +short
|
||||
|
||||
# A record resolves to VPS IP
|
||||
dig api-index.org A +short
|
||||
|
||||
# End-to-end HTTPS
|
||||
curl -sv https://api-index.org/ 2>&1 | grep -E "HTTP|certificate"
|
||||
```
|
||||
|
||||
Do not proceed to Phase 4 until all checks pass.
|
||||
|
||||
---
|
||||
|
||||
## Phase 4 — Switch Apex Record to CDN CNAME
|
||||
|
||||
Only do this after Phase 3 is fully confirmed.
|
||||
|
||||
### 4a. Run the Bunny.net CDN setup script
|
||||
|
||||
If not already done, provision the pull zone:
|
||||
|
||||
```bash
|
||||
BUNNYNET_API_KEY=your-key \
|
||||
ORIGIN_URL=https://<vps-ip> \
|
||||
CUSTOM_HOSTNAME=api-index.org \
|
||||
SYSLOG_HOST=<vps-ip> \
|
||||
INSTALL_CRON=true \
|
||||
./scripts/setup-bunnynet.sh
|
||||
```
|
||||
|
||||
Note the CDN hostname printed at the end (e.g. `apix-registry.b-cdn.net`).
|
||||
|
||||
### 4b. Replace the apex A record with a CNAME in BunnyDNS
|
||||
|
||||
In BunnyDNS → `api-index.org` zone:
|
||||
|
||||
1. Delete the `@` A record pointing to the VPS IP
|
||||
2. Add a `CNAME` record: `@` → `apix-registry.b-cdn.net` (your CDN hostname)
|
||||
|
||||
BunnyDNS supports CNAME at the apex via automatic flattening — this is why
|
||||
you migrated here.
|
||||
|
||||
### 4c. Verify CDN is serving the domain
|
||||
|
||||
```bash
|
||||
# A record now resolves to Bunny.net edge IP (not VPS IP)
|
||||
dig api-index.org A +short
|
||||
|
||||
# HTTPS still works
|
||||
curl -sv https://api-index.org/ 2>&1 | grep HTTP
|
||||
|
||||
# Second request should be a CDN cache HIT
|
||||
curl -sI "https://api-index.org/services?capability=nlp" | grep -i cache
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Phase 5 — Cleanup and Archive
|
||||
|
||||
**Do not delete the IONOS DNS zone.** Leave it intact as a ready-to-activate
|
||||
fallback for at least 30 days. If something goes wrong after Phase 3, you can
|
||||
revert by changing the nameservers back to IONOS in the registrar — the zone
|
||||
still has all the correct records.
|
||||
|
||||
After 30 days of stable operation:
|
||||
- IONOS zone can be deleted or left (it costs nothing to keep)
|
||||
- Document the Bunny.net nameservers in `.env` or `docs/` for future reference
|
||||
|
||||
---
|
||||
|
||||
## Rollback Procedure
|
||||
|
||||
At any phase before Phase 3 is complete: nothing has changed — no rollback needed.
|
||||
|
||||
After Phase 3 (nameserver switch):
|
||||
```
|
||||
IONOS → Domains → api-index.org → Nameservers → Use IONOS nameservers
|
||||
```
|
||||
Propagation back takes 5–60 minutes with TTL=300.
|
||||
The IONOS zone was never modified — it still has all records.
|
||||
|
||||
After Phase 4 (CNAME switch):
|
||||
1. In BunnyDNS: delete the CNAME `@`, re-add the A record `@` → VPS IP
|
||||
2. The CDN is bypassed; traffic flows directly to VPS again
|
||||
|
||||
---
|
||||
|
||||
## Checklist Summary
|
||||
|
||||
```
|
||||
Phase 0
|
||||
[ ] Audit and export all records from IONOS (expected: A @ , A/CNAME www, _domainconnect)
|
||||
[ ] Identify current TTL values
|
||||
[ ] Confirm no MX records present (no email on domain)
|
||||
|
||||
Phase 1
|
||||
[ ] Reduce all TTLs to 300 s in IONOS
|
||||
[ ] Wait for old TTL to drain (at minimum the old TTL duration)
|
||||
[ ] dig confirms TTL ~300 in answers
|
||||
|
||||
Phase 2
|
||||
[ ] BunnyDNS zone created for api-index.org
|
||||
[ ] A record @ and www replicated into BunnyDNS (no MX/TXT to worry about)
|
||||
[ ] Verified via: dig @<bunny-ns> api-index.org A and dig @<bunny-ns> www.api-index.org A
|
||||
|
||||
Phase 3
|
||||
[ ] Nameservers changed at IONOS registrar to BunnyDNS
|
||||
[ ] Propagation monitored until global NS shows Bunny.net
|
||||
[ ] HTTPS end-to-end test passes
|
||||
|
||||
Phase 4
|
||||
[ ] CDN pull zone provisioned (setup-bunnynet.sh)
|
||||
[ ] Apex A record replaced with CNAME in BunnyDNS
|
||||
[ ] CDN cache HIT confirmed on second request
|
||||
|
||||
Phase 5
|
||||
[ ] IONOS zone archived (do not delete for 30 days)
|
||||
[ ] Bunny.net nameservers documented
|
||||
```
|
||||
@@ -0,0 +1,793 @@
|
||||
# APIX Registry — Infrastructure Setup Guide
|
||||
|
||||
Complete walkthrough for deploying the APIX registry to a Hetzner VPS with Bunny.net CDN,
|
||||
Prometheus/Grafana observability, and live Loki telemetry.
|
||||
|
||||
---
|
||||
|
||||
## Table of Contents
|
||||
|
||||
1. [Architecture Overview](#1-architecture-overview)
|
||||
2. [Prerequisites](#2-prerequisites)
|
||||
3. [VPS Provisioning (Hetzner)](#3-vps-provisioning-hetzner)
|
||||
4. [DNS Configuration](#4-dns-configuration)
|
||||
5. [Server Bootstrap](#5-server-bootstrap)
|
||||
6. [Application Build](#6-application-build)
|
||||
7. [Environment Configuration](#7-environment-configuration)
|
||||
8. [Deploy the Stack](#8-deploy-the-stack)
|
||||
9. [Caddy TLS Reverse Proxy](#9-caddy-tls-reverse-proxy)
|
||||
10. [Bunny.net CDN Setup](#10-bunnynet-cdn-setup)
|
||||
11. [Live Telemetry: Promtail → Loki](#11-live-telemetry-promtail--loki)
|
||||
12. [Grafana Dashboards](#12-grafana-dashboards)
|
||||
13. [Weekly Analytics (Bunny.net Logs)](#13-weekly-analytics-bunnynet-logs)
|
||||
14. [Verification Checklist](#14-verification-checklist)
|
||||
15. [Routine Operations](#15-routine-operations)
|
||||
|
||||
---
|
||||
|
||||
## 1. Architecture Overview
|
||||
|
||||
```
|
||||
Internet
|
||||
│
|
||||
▼
|
||||
Bunny.net CDN (100+ PoPs, GDPR-compliant, European)
|
||||
│ Cache-Control headers respected; query-string vary cache enabled
|
||||
│ HIT: served from edge (~5ms). MISS: forwarded to VPS
|
||||
│
|
||||
▼
|
||||
Hetzner VPS (Helsinki / Falkenstein)
|
||||
│
|
||||
├── Caddy (80/443) ─── TLS termination, HTTPS redirect, rate limiting
|
||||
│ │
|
||||
│ ├── registry:8180 — REST API (Quarkus, JVM)
|
||||
│ ├── portal:8081 — Web UI (Quarkus, Qute templates)
|
||||
│ └── grafana:3000 — Dashboards (internal access only)
|
||||
│
|
||||
├── db:5432 — PostgreSQL 16 (no public port)
|
||||
├── spider:8082 — Liveness checker (no public port)
|
||||
├── prometheus:9090 — Metrics scraper (no public port)
|
||||
└── promtail:9080 — Syslog receiver → Loki (port 5514 open)
|
||||
|
||||
Grafana Cloud (Loki)
|
||||
│ Real-time: Bunny.net → TCP Syslog → Promtail → Loki → Grafana
|
||||
└── Every CDN request visible within 1-2 seconds
|
||||
```
|
||||
|
||||
**CDN governance constraint:** Cloudflare and AWS CloudFront must never be used.
|
||||
Both are founding member candidates — operating infrastructure gives governance leverage
|
||||
regardless of the founding charter. Approved providers: Bunny.net (primary), Fastly (fallback).
|
||||
|
||||
---
|
||||
|
||||
## 2. Prerequisites
|
||||
|
||||
### Local machine
|
||||
| Tool | Version | Purpose |
|
||||
|------|---------|---------|
|
||||
| Java (Temurin) | 21 | Building application JARs |
|
||||
| Maven | 3.9+ | Build system |
|
||||
| Docker Desktop | latest | Local dev + image building |
|
||||
| `curl` | any | Script calls to Bunny.net API |
|
||||
| `python3` | 3.8+ | JSON parsing in setup scripts |
|
||||
|
||||
Install Java 21 via SDKMAN:
|
||||
```bash
|
||||
curl -s https://get.sdkman.io | bash
|
||||
sdk install java 21.0.3-tem
|
||||
```
|
||||
|
||||
### Accounts required
|
||||
| Service | What you need |
|
||||
|---------|---------------|
|
||||
| Hetzner Cloud | API token for VPS creation (optional — can provision manually) |
|
||||
| Bunny.net | Account + API key (`Account → API`) |
|
||||
| Grafana Cloud | Free tier sufficient; Loki + Prometheus endpoints |
|
||||
| Domain registrar | Control over DNS for `api-index.org` |
|
||||
|
||||
---
|
||||
|
||||
## 3. VPS Provisioning (Hetzner)
|
||||
|
||||
### Recommended spec
|
||||
```
|
||||
Type: CPX21 (3 vCPU, 4 GB RAM) — sufficient for MVP
|
||||
Location: Helsinki (hel1) or Falkenstein (fsn1)
|
||||
OS: Ubuntu 24.04 LTS
|
||||
Network: Primary IPv4 + IPv6 dual stack
|
||||
Backups: Enable automatic backups (adds 20% to monthly cost)
|
||||
```
|
||||
|
||||
The CPX21 comfortably runs the full Docker stack (registry + spider + portal + db +
|
||||
prometheus + grafana + caddy + promtail) under MVP load. Upgrade to CPX31 if Prometheus
|
||||
retention or portal traffic grows.
|
||||
|
||||
### Firewall rules (Hetzner firewall or `ufw`)
|
||||
|
||||
| Port | Protocol | Source | Purpose |
|
||||
|------|----------|--------|---------|
|
||||
| 22 | TCP | Your IP only | SSH |
|
||||
| 80 | TCP | any | Caddy HTTP→HTTPS redirect |
|
||||
| 443 | TCP+UDP | any | Caddy HTTPS + HTTP/3 |
|
||||
| 5514 | TCP | any (Bunny.net publishes no static IP list — see note below) | Promtail syslog receiver |
|
||||
| 9090 | TCP | VPS localhost | Prometheus (internal only) |
|
||||
| 3000 | TCP | VPS localhost | Grafana (access via Caddy or SSH tunnel) |
|
||||
|
||||
**Bunny.net syslog source IPs:** Bunny.net does not publish a static IP list; open 5514
|
||||
to `0.0.0.0/0` and rely on the Promtail pipeline to discard unexpected traffic.
|
||||
The syslog format is the only authentication layer needed at this volume.
|
||||
|
||||
### SSH hardening (run as root after first login)
|
||||
```bash
|
||||
# Create deploy user
|
||||
useradd -m -s /bin/bash deploy
|
||||
usermod -aG sudo,docker deploy
|
||||
|
||||
# Copy your SSH public key
|
||||
mkdir -p /home/deploy/.ssh
|
||||
echo "YOUR_PUBLIC_KEY_HERE" > /home/deploy/.ssh/authorized_keys
|
||||
chown -R deploy:deploy /home/deploy/.ssh
|
||||
chmod 700 /home/deploy/.ssh
|
||||
chmod 600 /home/deploy/.ssh/authorized_keys
|
||||
|
||||
# Disable root SSH + password auth
|
||||
sed -i 's/^PermitRootLogin.*/PermitRootLogin no/' /etc/ssh/sshd_config
|
||||
sed -i 's/^PasswordAuthentication.*/PasswordAuthentication no/' /etc/ssh/sshd_config
|
||||
systemctl restart sshd
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 4. DNS Configuration
|
||||
|
||||
### Required records
|
||||
|
||||
| Name | Type | Value | TTL |
|
||||
|------|------|-------|-----|
|
||||
| `api-index.org` | A | VPS IPv4 | 300 |
|
||||
| `api-index.org` | AAAA | VPS IPv6 | 300 |
|
||||
| `www.api-index.org` | CNAME | `api-index.org` | 3600 |
|
||||
|
||||
Set TTL to 300 (5 min) before the cutover so propagation is fast.
|
||||
After the CDN is live (Step 10), replace the A/AAAA records with a CNAME pointing at the Bunny.net hostname instead.
|
||||
|
||||
### After CDN setup (replace A records)
|
||||
```
|
||||
api-index.org CNAME <bunnynet-cname>.b-cdn.net
|
||||
```
|
||||
|
||||
Caddy still handles TLS on the VPS. Bunny.net terminates the edge TLS and forwards
|
||||
to the VPS over HTTPS using the Caddy certificate.
|
||||
|
||||
---
|
||||
|
||||
## 5. Server Bootstrap
|
||||
|
||||
SSH in as `deploy` and run:
|
||||
|
||||
```bash
|
||||
# 1. System update
|
||||
sudo apt-get update && sudo apt-get upgrade -y
|
||||
|
||||
# 2. Docker (official repo — apt package is outdated)
|
||||
curl -fsSL https://download.docker.com/linux/ubuntu/gpg \
|
||||
| sudo gpg --dearmor -o /usr/share/keyrings/docker-archive-keyring.gpg
|
||||
|
||||
echo "deb [arch=$(dpkg --print-architecture) \
|
||||
signed-by=/usr/share/keyrings/docker-archive-keyring.gpg] \
|
||||
https://download.docker.com/linux/ubuntu $(lsb_release -cs) stable" \
|
||||
| sudo tee /etc/apt/sources.list.d/docker.list > /dev/null
|
||||
|
||||
sudo apt-get update
|
||||
sudo apt-get install -y docker-ce docker-ce-cli containerd.io docker-compose-plugin
|
||||
|
||||
# 3. Add deploy user to docker group (logout + login to apply)
|
||||
sudo usermod -aG docker deploy
|
||||
|
||||
# 4. Install Promtail (for Loki integration — see Step 11)
|
||||
PROMTAIL_VERSION="3.0.0"
|
||||
wget -q "https://github.com/grafana/loki/releases/download/v${PROMTAIL_VERSION}/promtail-linux-amd64.zip"
|
||||
unzip -q promtail-linux-amd64.zip
|
||||
sudo mv promtail-linux-amd64 /usr/local/bin/promtail
|
||||
sudo chmod +x /usr/local/bin/promtail
|
||||
rm promtail-linux-amd64.zip
|
||||
|
||||
# 5. Clone the repository
|
||||
git clone https://gitea.your-server.example/botstandards/apix-mvp.git /opt/apix
|
||||
# Or using GitHub mirror during MVP phase:
|
||||
git clone https://github.com/your-org/apix-mvp.git /opt/apix
|
||||
|
||||
cd /opt/apix
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 6. Application Build
|
||||
|
||||
Build JARs locally and copy to the VPS, or build directly on the VPS if Java 21 is installed.
|
||||
|
||||
### Build locally (recommended for CI phase)
|
||||
```bash
|
||||
# On your dev machine:
|
||||
cd bot-service-index/apix-mvp
|
||||
mvn clean package -DskipTests
|
||||
|
||||
# Copy artifacts to VPS
|
||||
scp apix-registry/target/quarkus-app/ deploy@<vps-ip>:/opt/apix/apix-registry/target/quarkus-app/ -r
|
||||
scp apix-spider/target/quarkus-app/ deploy@<vps-ip>:/opt/apix/apix-spider/target/quarkus-app/ -r
|
||||
scp apix-portal/target/quarkus-app/ deploy@<vps-ip>:/opt/apix/apix-portal/target/quarkus-app/ -r
|
||||
```
|
||||
|
||||
### Build on VPS (MVP shortcut)
|
||||
```bash
|
||||
# Install Java 21 on VPS
|
||||
sudo apt-get install -y wget
|
||||
wget -q "https://github.com/adoptium/temurin21-binaries/releases/download/jdk-21.0.3%2B9/OpenJDK21U-jdk_x64_linux_hotspot_21.0.3_9.tar.gz" \
|
||||
-O /tmp/jdk21.tar.gz
|
||||
sudo mkdir -p /opt/java
|
||||
sudo tar xzf /tmp/jdk21.tar.gz -C /opt/java
|
||||
sudo ln -sf /opt/java/jdk-21.0.3+9/bin/java /usr/local/bin/java
|
||||
sudo ln -sf /opt/java/jdk-21.0.3+9/bin/javac /usr/local/bin/javac
|
||||
|
||||
# Install Maven
|
||||
sudo apt-get install -y maven
|
||||
|
||||
# Build
|
||||
cd /opt/apix
|
||||
mvn clean package -DskipTests
|
||||
```
|
||||
|
||||
### Docker images
|
||||
|
||||
Dockerfiles are defined in WORKLOG Block 5 (I-04 to I-06) and not yet created.
|
||||
Until they exist, run the JARs directly via `quarkus:dev` or write minimal Dockerfiles:
|
||||
|
||||
```dockerfile
|
||||
# infra/Dockerfile.registry (placeholder until Block 5)
|
||||
FROM eclipse-temurin:21-jre-alpine
|
||||
WORKDIR /app
|
||||
COPY apix-registry/target/quarkus-app/ quarkus-app/
|
||||
EXPOSE 8180
|
||||
ENTRYPOINT ["java", "-jar", "quarkus-app/quarkus-run.jar"]
|
||||
```
|
||||
|
||||
Repeat for `Dockerfile.spider` (port 8082) and `Dockerfile.portal` (port 8081).
|
||||
|
||||
Build and tag:
|
||||
```bash
|
||||
cd /opt/apix
|
||||
docker build -f infra/Dockerfile.registry -t apix-registry:latest .
|
||||
docker build -f infra/Dockerfile.spider -t apix-spider:latest .
|
||||
docker build -f infra/Dockerfile.portal -t apix-portal:latest .
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 7. Environment Configuration
|
||||
|
||||
Create `/opt/apix/.env` from the template below. This file is in `.gitignore` — never commit it.
|
||||
|
||||
```bash
|
||||
# /opt/apix/.env — production values
|
||||
|
||||
# ── Database ──────────────────────────────────────────────────────────────────
|
||||
APIX_DB_USER=apix
|
||||
APIX_DB_PASSWORD=<generate: openssl rand -base64 32>
|
||||
APIX_DB_NAME=apix
|
||||
APIX_DB_PORT=5432 # Only exposed inside Docker network in production
|
||||
|
||||
# ── API security ─────────────────────────────────────────────────────────────
|
||||
# Used to authenticate write requests (POST /services, PATCH /services/*, etc.)
|
||||
# Rotate this key when onboarding new registrars.
|
||||
APIX_API_KEY=<generate: openssl rand -hex 32>
|
||||
|
||||
# ── Registry identity ─────────────────────────────────────────────────────────
|
||||
APIX_REGISTRY_BASE_URL=https://api-index.org
|
||||
APIX_REGISTRY_NAME=APIX Registry
|
||||
APIX_REGISTRY_DESCRIPTION=The open autonomous agent service discovery registry.
|
||||
|
||||
# ── Verification integrations ────────────────────────────────────────────────
|
||||
GLEIF_API_URL=https://api.gleif.org/api/v1
|
||||
OPENCORPORATES_API_KEY=<your OpenCorporates API key — leave blank if not yet contracted>
|
||||
APIX_VERIFICATION_TIMEOUT_MS=5000
|
||||
|
||||
# ── Mail signing (Ed25519) ────────────────────────────────────────────────────
|
||||
# Leave blank on first deploy — ephemeral key generated at startup.
|
||||
# Set before production: openssl genpkey -algorithm ed25519 | ...
|
||||
APIX_MAIL_SIGNING_PRIVATE_KEY=
|
||||
APIX_MAIL_SIGNING_PUBLIC_KEY=
|
||||
APIX_MAIL_SIGNING_KID=2026-05
|
||||
|
||||
# ── Spider ────────────────────────────────────────────────────────────────────
|
||||
SPIDER_INTERVAL_MINUTES=15
|
||||
|
||||
# ── Grafana ───────────────────────────────────────────────────────────────────
|
||||
GRAFANA_ADMIN_PASSWORD=<generate: openssl rand -base64 16>
|
||||
GRAFANA_ROOT_URL=https://grafana.api-index.org # or http://localhost:3000 if SSH tunnel only
|
||||
|
||||
# ── Logging ───────────────────────────────────────────────────────────────────
|
||||
LOG_LEVEL=INFO
|
||||
```
|
||||
|
||||
Generate secrets in one pass:
|
||||
```bash
|
||||
echo "APIX_DB_PASSWORD=$(openssl rand -base64 32)"
|
||||
echo "APIX_API_KEY=$(openssl rand -hex 32)"
|
||||
echo "GRAFANA_ADMIN_PASSWORD=$(openssl rand -base64 16)"
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 8. Deploy the Stack
|
||||
|
||||
```bash
|
||||
cd /opt/apix/infra
|
||||
|
||||
# Start everything
|
||||
docker compose --env-file ../.env up -d
|
||||
|
||||
# Watch startup logs
|
||||
docker compose logs -f --tail=50
|
||||
|
||||
# Verify all services are healthy
|
||||
docker compose ps
|
||||
```
|
||||
|
||||
Expected healthy services after ~60 seconds:
|
||||
|
||||
| Service | Health endpoint | Expected |
|
||||
|---------|----------------|---------|
|
||||
| `db` | `pg_isready` | healthy |
|
||||
| `registry` | `http://localhost:8180/q/health/live` | `{"status":"UP"}` |
|
||||
| `spider` | `http://localhost:8082/q/health/live` | `{"status":"UP"}` |
|
||||
| `portal` | `http://localhost:8081/q/health/live` | `{"status":"UP"}` |
|
||||
| `prometheus` | `http://localhost:9090/-/healthy` | `Prometheus Server is Healthy.` |
|
||||
| `grafana` | `http://localhost:3000/api/health` | `{"database":"ok"}` |
|
||||
|
||||
Quick smoke test (from VPS):
|
||||
```bash
|
||||
# Registry root (HATEOAS navigation)
|
||||
curl -s http://localhost:8180/ | python3 -m json.tool
|
||||
|
||||
# Metrics endpoint (Prometheus scrape target)
|
||||
curl -s http://localhost:8180/q/metrics | grep apix_search
|
||||
|
||||
# Search endpoint
|
||||
curl -s "http://localhost:8180/services?capability=nlp" | python3 -m json.tool
|
||||
```
|
||||
|
||||
### Liquibase note
|
||||
|
||||
Liquibase runs automatically at startup (`quarkus.liquibase.migrate-at-start=true`).
|
||||
If the changelog is missing (`db/changelog/db.changelog-master.xml`), the registry will
|
||||
fail to start. Check logs with `docker compose logs registry` and ensure migrations
|
||||
are present (WORKLOG Block 1 / C-20 to C-24).
|
||||
|
||||
---
|
||||
|
||||
## 9. Caddy TLS Reverse Proxy
|
||||
|
||||
Create `infra/Caddyfile`:
|
||||
|
||||
```caddy
|
||||
# infra/Caddyfile
|
||||
|
||||
api-index.org {
|
||||
# Public API — registry
|
||||
handle /services* { reverse_proxy registry:8180 }
|
||||
handle /devices* { reverse_proxy registry:8180 }
|
||||
handle /organizations* { reverse_proxy registry:8180 }
|
||||
handle /mail-signing-keys { reverse_proxy registry:8180 }
|
||||
handle / { reverse_proxy registry:8180 }
|
||||
|
||||
    # /q/* (Quarkus health/metrics) is still proxied to the registry here;
    # the CDN is configured to bypass cache for these paths (see section 10, step 5)
|
||||
handle /q/* { reverse_proxy registry:8180 }
|
||||
|
||||
# Rate limiting (requires caddy-ratelimit plugin or enterprise)
|
||||
# Basic protection: Caddy's built-in connection limit
|
||||
header {
|
||||
Strict-Transport-Security "max-age=31536000; includeSubDomains"
|
||||
X-Content-Type-Options "nosniff"
|
||||
X-Frame-Options "DENY"
|
||||
}
|
||||
|
||||
log {
|
||||
output file /var/log/caddy/api-index.log
|
||||
format json
|
||||
}
|
||||
}
|
||||
|
||||
# Portal — separate subdomain (optional)
|
||||
portal.api-index.org {
|
||||
reverse_proxy portal:8081
|
||||
header Strict-Transport-Security "max-age=31536000; includeSubDomains"
|
||||
}
|
||||
|
||||
# Grafana — restrict to internal access or require basic auth
|
||||
grafana.api-index.org {
|
||||
basicauth {
|
||||
# htpasswd -nb admin <password> — generate and paste hash here
|
||||
admin $2a$14$REPLACE_WITH_BCRYPT_HASH
|
||||
}
|
||||
reverse_proxy grafana:3000
|
||||
}
|
||||
```
|
||||
|
||||
Caddy obtains TLS certificates from Let's Encrypt automatically at startup for every site address declared in the Caddyfile (on-demand issuance is a separate opt-in feature and is not enabled here).
|
||||
No manual certificate management needed.
|
||||
|
||||
Restart the `caddy` container to pick up the new Caddyfile (the file is volume-mounted, so no image rebuild is needed):
|
||||
```bash
|
||||
cd /opt/apix/infra
|
||||
docker compose restart caddy
|
||||
docker compose logs caddy -f
|
||||
```
|
||||
|
||||
Verify TLS:
|
||||
```bash
|
||||
curl -sv https://api-index.org/ 2>&1 | grep -E "SSL|certificate|subject"
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 10. Bunny.net CDN Setup
|
||||
|
||||
The CDN sits in front of Caddy/registry and handles ~95% of read traffic from cache.
|
||||
|
||||
### One-time setup
|
||||
```bash
|
||||
cd /opt/apix
|
||||
|
||||
# With Loki log forwarding (strongly recommended for observability):
|
||||
BUNNYNET_API_KEY=your-key \
|
||||
ORIGIN_URL=https://api-index.org \
|
||||
CUSTOM_HOSTNAME=api-index.org \
|
||||
SYSLOG_HOST=<vps-public-ip> \
|
||||
SYSLOG_PORT=5514 \
|
||||
./scripts/setup-bunnynet.sh
|
||||
```
|
||||
|
||||
The script:
|
||||
1. Creates a pull zone pointing at `https://api-index.org`
|
||||
2. Enables query-string vary cache (so `?capability=nlp` and `?capability=translation`
|
||||
are cached as separate entries — critical for correct cache behavior)
|
||||
3. Sets edge TTL to follow origin `Cache-Control` headers (registry sets `max-age=30`
|
||||
on `/services` and `/devices`; `max-age=60` on `/`)
|
||||
4. Adds the `api-index.org` custom hostname
|
||||
5. Adds an edge rule to bypass cache for `/q/*` (Quarkus health/metrics endpoints)
|
||||
6. Enables real-time syslog forwarding to Promtail (when `SYSLOG_HOST` is set)
|
||||
7. Prints the CNAME value for your DNS record
|
||||
|
||||
### Update DNS to point to CDN edge
|
||||
After the script prints the CDN hostname:
|
||||
```
|
||||
api-index.org CNAME apix-registry.b-cdn.net
|
||||
```
|
||||
|
||||
Remove the A/AAAA records pointing directly to the VPS. The VPS is now origin-only.
|
||||
|
||||
### Verify CDN is caching
|
||||
|
||||
```bash
|
||||
# First request — cache MISS (origin hit)
|
||||
curl -sI https://api-index.org/services?capability=nlp | grep -i "cache\|x-cache\|age"
|
||||
|
||||
# Second request within 30s — cache HIT
|
||||
curl -sI https://api-index.org/services?capability=nlp | grep -i "cache\|x-cache\|age"
|
||||
|
||||
# From an Asian machine or VPN endpoint (tests geographic edge)
|
||||
curl -w "Total: %{time_total}s\n" -o /dev/null -s https://api-index.org/services?capability=nlp
|
||||
# Target: <20ms after warm-up
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 11. Live Telemetry: Promtail → Loki
|
||||
|
||||
Provides real-time telemetry of all CDN traffic (hits + misses) during demos.
|
||||
Origin Prometheus only sees cache misses — Loki sees everything.
|
||||
|
||||
### Grafana Cloud Loki credentials
|
||||
|
||||
In Grafana Cloud (`grafana.com`):
|
||||
1. Go to your stack → Loki → Details
|
||||
2. Note the **Push URL** (e.g. `https://logs-prod-eu-west-0.grafana.net/loki/api/v1/push`)
|
||||
3. Create a service account with `logs:write` scope → copy the token
|
||||
|
||||
### Configure Promtail
|
||||
|
||||
```bash
|
||||
# Copy config to system path
|
||||
sudo cp /opt/apix/scripts/promtail-cdn-logs.yaml /etc/promtail/cdn-logs.yaml
|
||||
|
||||
# Fill in credentials
|
||||
sudo sed -i 's|https://LOKI_PUSH_URL/loki/api/v1/push|https://logs-prod-eu-west-0.grafana.net/loki/api/v1/push|g' \
|
||||
/etc/promtail/cdn-logs.yaml
|
||||
sudo sed -i 's|"LOKI_USERNAME"|"123456"|g' /etc/promtail/cdn-logs.yaml # stack user ID
|
||||
sudo sed -i 's|"LOKI_PASSWORD"|"your-token-here"|g' /etc/promtail/cdn-logs.yaml
|
||||
```
|
||||
|
||||
Or edit directly: `sudo nano /etc/promtail/cdn-logs.yaml` — replace the three
|
||||
`LOKI_PUSH_URL`, `LOKI_USERNAME`, `LOKI_PASSWORD` placeholders.
|
||||
|
||||
### Create systemd service
|
||||
|
||||
```bash
|
||||
sudo tee /etc/systemd/system/promtail.service > /dev/null <<'EOF'
|
||||
[Unit]
|
||||
Description=Promtail — Bunny.net CDN log forwarder
|
||||
After=network.target
|
||||
|
||||
[Service]
|
||||
User=nobody
|
||||
Group=nogroup
|
||||
ExecStart=/usr/local/bin/promtail -config.file=/etc/promtail/cdn-logs.yaml
|
||||
Restart=on-failure
|
||||
RestartSec=10
|
||||
|
||||
[Install]
|
||||
WantedBy=multi-user.target
|
||||
EOF
|
||||
|
||||
sudo systemctl daemon-reload
|
||||
sudo systemctl enable --now promtail
|
||||
sudo systemctl status promtail
|
||||
```
|
||||
|
||||
### Verify the pipeline end-to-end
|
||||
|
||||
```bash
|
||||
# 1. Make a test request through the CDN
|
||||
curl -s "https://api-index.org/services?capability=nlp" > /dev/null
|
||||
|
||||
# 2. Check Promtail received it (should appear within 1-2 seconds)
|
||||
sudo journalctl -u promtail -f --no-pager
|
||||
|
||||
# 3. In Grafana Explore (Loki datasource):
|
||||
# {job="apix-cdn"} — should show a log line from the test request
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 12. Grafana Dashboards
|
||||
|
||||
### Add Loki datasource to Grafana
|
||||
|
||||
The stack's provisioned Prometheus datasource is auto-loaded. Add Loki manually or via provisioning:
|
||||
|
||||
```yaml
|
||||
# infra/grafana/provisioning/datasources/loki.yml
|
||||
apiVersion: 1
|
||||
datasources:
|
||||
- name: Loki
|
||||
type: loki
|
||||
access: proxy
|
||||
url: https://logs-prod-eu-west-0.grafana.net
|
||||
basicAuth: true
|
||||
basicAuthUser: "123456" # Grafana Cloud stack user ID
|
||||
secureJsonData:
|
||||
basicAuthPassword: "your-loki-token"
|
||||
isDefault: false
|
||||
editable: false
|
||||
```
|
||||
|
||||
Restart Grafana to apply:
|
||||
```bash
|
||||
docker compose restart grafana
|
||||
```
|
||||
|
||||
### Import the OpenClaw demo dashboard
|
||||
|
||||
```bash
|
||||
# Copy the dashboard JSON to the provisioning directory
|
||||
cp /opt/apix/scripts/grafana-demo-dashboard.json \
|
||||
/opt/apix/infra/grafana/provisioning/dashboards/demo-openclaw.json
|
||||
```
|
||||
|
||||
Grafana auto-discovers dashboards in the provisioning path (30s poll interval per `provider.yml`).
|
||||
No manual import needed.
|
||||
|
||||
Alternatively, import via UI:
|
||||
1. Grafana → Dashboards → Import
|
||||
2. Upload `scripts/grafana-demo-dashboard.json`
|
||||
3. Select the Loki and Prometheus datasources when prompted
|
||||
|
||||
### Dashboard refresh for demo sessions
|
||||
|
||||
The demo dashboard is pre-configured with:
|
||||
- **Refresh:** 5 seconds
|
||||
- **Time range:** Last 15 minutes
|
||||
- **Auto-play:** Enable via Grafana's kiosk mode for the demo screen
|
||||
|
||||
Kiosk mode URL (hides nav bar):
|
||||
```
|
||||
https://grafana.api-index.org/d/apix-demo-openclaw/apix-registry-demo?kiosk&refresh=5s
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 13. Weekly Analytics (Bunny.net Logs)
|
||||
|
||||
Bunny.net stores gzip access logs (one file per day). The `query-report.sh` script
|
||||
downloads them, parses them, and produces a capability frequency report.
|
||||
|
||||
```bash
|
||||
# Basic report — last 7 days
|
||||
BUNNYNET_API_KEY=your-key \
|
||||
PULL_ZONE_ID=$(cat /opt/apix/.bunnynet-pull-zone-id) \
|
||||
./scripts/query-report.sh
|
||||
```
|
||||
|
||||
With Prometheus Pushgateway (builds a weekly time-series in Grafana):
|
||||
```bash
|
||||
BUNNYNET_API_KEY=your-key \
|
||||
PULL_ZONE_ID=$(cat /opt/apix/.bunnynet-pull-zone-id) \
|
||||
PROMETHEUS_PUSH_URL=https://pushgateway.your-grafana.example/metrics/job/apix-cdn-report \
|
||||
DAYS=7 \
|
||||
./scripts/query-report.sh
|
||||
```
|
||||
|
||||
### Weekly cron job (on VPS)
|
||||
|
||||
Pass `INSTALL_CRON=true` when running `setup-bunnynet.sh` and the cron entry is
|
||||
installed automatically — no manual `crontab -e` needed:
|
||||
|
||||
```bash
|
||||
BUNNYNET_API_KEY=your-key \
|
||||
ORIGIN_URL=https://api-index.org \
|
||||
INSTALL_CRON=true \
|
||||
./scripts/setup-bunnynet.sh
|
||||
```
|
||||
|
||||
The script installs a `crontab` entry for the current user (Mondays 06:00) and
|
||||
deduplicates on re-run — safe to call again after rotating the API key.
|
||||
Verify with: `crontab -l | grep query-report`
|
||||
|
||||
The report answers:
|
||||
- Which capabilities are queried most (all requests, including CDN hits)
|
||||
- Cache hit ratio per endpoint
|
||||
- Geographic distribution (PoP breakdown)
|
||||
- Top query string combinations
|
||||
|
||||
---
|
||||
|
||||
## 14. Verification Checklist
|
||||
|
||||
Run through this after each deployment.
|
||||
|
||||
### Registry
|
||||
```bash
|
||||
# HATEOAS root
|
||||
curl -s https://api-index.org/ | python3 -m json.tool
|
||||
|
||||
# Search (returns empty array if no services registered yet — correct)
|
||||
curl -s "https://api-index.org/services?capability=nlp"
|
||||
|
||||
# Health
|
||||
curl -s https://api-index.org/q/health | python3 -m json.tool
|
||||
|
||||
# Cache-Control header on search endpoint
|
||||
curl -sI "https://api-index.org/services?capability=nlp" | grep -i cache-control
|
||||
# Expected: Cache-Control: public, max-age=30
|
||||
```
|
||||
|
||||
### CDN
|
||||
```bash
|
||||
# Second request should be a cache HIT
|
||||
curl -sI "https://api-index.org/services?capability=nlp" | grep -i cache
|
||||
# Expected: X-Cache: HIT (or similar from Bunny.net)
|
||||
|
||||
# Edge latency test — run from a machine outside Germany
|
||||
curl -w "DNS: %{time_namelookup}s Connect: %{time_connect}s Total: %{time_total}s\n" \
|
||||
-o /dev/null -s "https://api-index.org/services?capability=nlp"
|
||||
# Target: Total <20ms from Asia/US after warm-up
|
||||
```
|
||||
|
||||
### Observability
|
||||
```bash
|
||||
# Prometheus scraping registry
|
||||
curl -s http://localhost:9090/api/v1/targets | python3 -c \
|
||||
"import sys,json; [print(t['labels']['job'], t['health']) for t in json.load(sys.stdin)['data']['activeTargets']]"
|
||||
|
||||
# Loki receiving CDN logs
|
||||
# In Grafana Explore: {job="apix-cdn"} | last 5 min should show entries
|
||||
```
|
||||
|
||||
### TLS
|
||||
```bash
|
||||
curl -sv https://api-index.org/ 2>&1 | grep -E "issuer|subject|expire"
|
||||
# Should show Let's Encrypt issuer
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 15. Routine Operations
|
||||
|
||||
### Update application
|
||||
```bash
|
||||
cd /opt/apix
|
||||
|
||||
# Pull latest code
|
||||
git pull origin main
|
||||
|
||||
# Rebuild affected images
|
||||
docker build -f infra/Dockerfile.registry -t apix-registry:latest .
|
||||
|
||||
# Rolling restart
|
||||
docker compose up -d registry
|
||||
docker compose logs registry -f --tail=50
|
||||
```
|
||||
|
||||
### Rotate API key
|
||||
```bash
|
||||
NEW_KEY=$(openssl rand -hex 32)
|
||||
# Update .env
|
||||
sed -i "s/^APIX_API_KEY=.*/APIX_API_KEY=${NEW_KEY}/" /opt/apix/.env
|
||||
# Restart registry to pick up new key
|
||||
docker compose up -d registry
|
||||
echo "New key: ${NEW_KEY}"
|
||||
# Distribute to all registrar clients immediately — the old key stops working
# as soon as the registry restarts with the new value
|
||||
```
|
||||
|
||||
### Database backup
|
||||
```bash
|
||||
# Manual backup
|
||||
docker exec apix-infra-db-1 pg_dump -U apix apix | gzip > /opt/apix/backups/apix-$(date +%Y%m%d).sql.gz
|
||||
|
||||
# Automated daily backup via cron
|
||||
0 2 * * * docker exec apix-infra-db-1 pg_dump -U apix apix | gzip \
|
||||
> /opt/apix/backups/apix-$(date +\%Y\%m\%d).sql.gz
|
||||
|
||||
# Restore
|
||||
gunzip -c /opt/apix/backups/apix-20260101.sql.gz | docker exec -i apix-infra-db-1 psql -U apix apix
|
||||
```
|
||||
|
||||
### View logs
|
||||
```bash
|
||||
# Live registry logs
|
||||
docker compose -f /opt/apix/infra/docker-compose.yml logs registry -f
|
||||
|
||||
# Use the convenience scripts (from project root)
|
||||
./scripts/logs.sh registry
|
||||
./scripts/logs.sh spider
|
||||
./scripts/logs.sh portal
|
||||
|
||||
# Promtail (CDN log forwarder)
|
||||
sudo journalctl -u promtail -f
|
||||
```
|
||||
|
||||
### Purge CDN cache (after deploying schema changes)
|
||||
```bash
|
||||
curl -sf -X POST "https://api.bunny.net/pullzone/$(cat /opt/apix/.bunnynet-pull-zone-id)/purgeCache" \
|
||||
-H "AccessKey: ${BUNNYNET_API_KEY}"
|
||||
```
|
||||
|
||||
### Stop / restart the full stack
|
||||
```bash
|
||||
cd /opt/apix/infra
|
||||
./scripts/stop.sh # graceful stop
|
||||
./scripts/restart.sh # stop + start
|
||||
./scripts/reset.sh # WARNING: drops all volumes including DB data
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Appendix: Environment Variable Reference
|
||||
|
||||
All variables accepted by the `registry` container, sourced from `.env`:
|
||||
|
||||
| Variable | Default | Required | Description |
|
||||
|----------|---------|----------|-------------|
|
||||
| `QUARKUS_DATASOURCE_JDBC_URL` | `jdbc:postgresql://db:5432/apix` | yes | Database JDBC URL |
|
||||
| `QUARKUS_DATASOURCE_USERNAME` | `apix` | yes | DB username |
|
||||
| `QUARKUS_DATASOURCE_PASSWORD` | `apix` | yes | DB password — use a strong value |
|
||||
| `APIX_API_KEY` | `dev-insecure-key-change-in-prod` | yes | Write-endpoint auth key |
|
||||
| `APIX_REGISTRY_BASE_URL` | `http://localhost:8180` | yes | Used in HATEOAS links |
|
||||
| `GLEIF_API_URL` | `https://api.gleif.org/api/v1` | no | O2 verification: GLEIF REST API |
|
||||
| `OPENCORPORATES_API_KEY` | _(blank)_ | no | O2 verification: OpenCorporates |
|
||||
| `APIX_VERIFICATION_TIMEOUT_MS` | `5000` | no | HTTP timeout for verification calls |
|
||||
| `APIX_MAIL_SIGNING_PRIVATE_KEY` | _(blank)_ | no | Ed25519 private key, Base64; ephemeral if blank |
|
||||
| `APIX_MAIL_SIGNING_PUBLIC_KEY` | _(blank)_ | no | Ed25519 public key, Base64 |
|
||||
| `APIX_MAIL_SIGNING_KID` | `dev` | no | Key ID in signed payloads; rotate every 6 months |
|
||||
| `SANCTIONS_CACHE_PATH` | `./sanctions-cache` | no | Local path for sanctions list cache |
|
||||
| `LOG_LEVEL` | `INFO` | no | `DEBUG` / `INFO` / `WARNING` / `ERROR` |
|
||||
Reference in New Issue
Block a user