{
  "name": "@ruvector/attention",
  "version": "0.1.32",
  "description": "High-performance attention mechanisms with 7 mathematical theories: Optimal Transport, Mixed Curvature, Topology, Information Geometry, Information Bottleneck, PDE/Diffusion, Unified Diagnostics",
  "main": "index.js",
  "types": "index.d.ts",
  "license": "MIT",
  "repository": "https://github.com/ruvnet/ruvector",
  "keywords": ["attention", "transformer", "machine-learning", "optimal-transport", "hyperbolic", "topology"],
  "optionalDependencies": {
    "@ruvector/attention-linux-x64-gnu": "0.1.32",
    "@ruvector/attention-linux-arm64-gnu": "0.1.32",
    "@ruvector/attention-darwin-x64": "0.1.32",
    "@ruvector/attention-darwin-arm64": "0.1.32",
    "@ruvector/attention-win32-x64-msvc": "0.1.32"
  }
}