webxos committed on
Commit
4c39e64
·
verified ·
1 Parent(s): a7948e6

Upload metadata.json

Browse files
Files changed (1) hide show
  1. metadata.json +125 -0
metadata.json ADDED
@@ -0,0 +1,125 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
{
  "dataset_info": {
    "name": "BCI-FPS_MOTOR_IMAGERY_Dataset",
    "description": "High-bandwidth neural training data for BCI research. Mode: motor_imagery",
    "version": "1.0.0",
    "license": "MIT",
    "citation": "@misc{bci_fps_motor_imagery_2024,\n title={BCI-FPS motor_imagery Training Dataset},\n author={Neuralink Research},\n year={2024},\n note={High-frequency intent decoding data for brain-computer interface development}\n}",
    "data_schema": {
      "neural_data": {
        "timestamp": "UNIX timestamp in milliseconds",
        "session_time": "Time since session start in milliseconds",
        "channels": "Object mapping channel names to neural signal values",
        "intent_context": "Contextual information about user intent"
      },
      "intent_stream": {
        "timestamp": "UNIX timestamp in milliseconds",
        "mouse": "Mouse position and movement data",
        "keyboard": "Keyboard state",
        "camera": "Camera position and rotation",
        "environment": "Game environment state"
      },
      "handwriting_samples": {
        "letter": "Letter being traced",
        "samples": "Array of handwriting samples with position and pressure data"
      }
    },
    "research_applications": [
      "Motor imagery decoding for prosthetic control",
      "Simultaneous intent decoding for fluid BCI interfaces",
      "Visual evoked potential (c-VEP) calibration",
      "Handwriting intent recognition for text entry",
      "Neural network training for brain-computer interfaces"
    ]
  },
  "session_info": {
    "session_id": "bci_fps_motor_imagery_1767171179245",
    "mode": "motor_imagery",
    "start_time": "2025-12-31T08:52:07.033Z",
    "duration_ms": 52212,
    "sampling_rate_hz": 1000,
    "neural_channels": 32
  },
  "features": {
    "timestamp": "int64",
    "session_time": "int64",
    "mouse": {
      "position": "list<float64>",
      "delta": "list<int64>",
      "buttons": "int64"
    },
    "keyboard": {
      "mouse": "bool"
    },
    "camera": {
      "position": "list<float64>",
      "rotation": "list<float64>"
    },
    "environment": {
      "active_targets": [
        {
          "position": "list<float64>",
          "distance": "float64"
        }
      ],
      "fps": "int64"
    },
    "type": "string",
    "button": "string",
    "target": "null",
    "neural_context": {
      "channel_0": "float64",
      "channel_1": "float64",
      "channel_2": "float64",
      "channel_3": "float64",
      "channel_4": "float64",
      "channel_5": "float64",
      "channel_6": "float64",
      "channel_7": "float64",
      "channel_8": "float64",
      "channel_9": "float64",
      "channel_10": "float64",
      "channel_11": "float64",
      "channel_12": "float64",
      "channel_13": "float64",
      "channel_14": "float64",
      "channel_15": "float64",
      "channel_16": "float64",
      "channel_17": "float64",
      "channel_18": "float64",
      "channel_19": "float64",
      "channel_20": "float64",
      "channel_21": "float64",
      "channel_22": "float64",
      "channel_23": "float64",
      "channel_24": "float64",
      "channel_25": "float64",
      "channel_26": "float64",
      "channel_27": "float64",
      "channel_28": "float64",
      "channel_29": "float64",
      "channel_30": "float64",
      "channel_31": "float64"
    }
  },
  "huggingface": {
    "compatible": true,
    "task_categories": [
      "brain-computer-interface",
      "neural-decoding",
      "human-computer-interaction"
    ],
    "task_ids": [
      "motor-imagery",
      "intent-decoding",
      "visual-evoked-potentials",
      "handwriting-recognition"
    ],
    "language": [
      "en"
    ],
    "size_categories": [
      "10K<n<100K"
    ]
  }
}