GET /cloud/v3/inference/models/{model_id}
import os

from gcore import Gcore

# Build the API client; the key is read from the environment by default,
# so passing it explicitly here is optional.
client = Gcore(api_key=os.environ.get("GCORE_API_KEY"))

# Fetch the catalog entry for a single inference model by its ID.
model = client.cloud.inference.models.get("model_id")
print(model.id)
{
  "category": "Text Classification",
  "default_flavor_name": "inference-16vcpu-232gib-1xh100-80gb",
  "description": "My first model",
  "developer": "Stability AI",
  "documentation_page": "/docs",
  "eula_url": "https://example.com/eula",
  "example_curl_request": "curl -X POST http://localhost:8080/predict -d '{\"data\": \"sample\"}'",
  "has_eula": true,
  "id": "3fa85f64-5717-4562-b3fc-2c963f66afa6",
  "image_registry_id": "123e4567-e89b-12d3-a456-426614174999",
  "image_url": "registry.hub.docker.com/my_model:latest",
  "inference_backend": "torch",
  "inference_frontend": "gradio",
  "model_id": "mistralai/Pixtral-12B-2409",
  "name": "model1",
  "openai_compatibility": "full",
  "port": 8080,
  "version": "v0.1"
}

Authorizations

APIKey
string
header
required

API key for authentication.

Path Parameters

model_id
string
required

Model ID

Response

200 - application/json

OK

The response is of type object.