/*
 * Copyright 2008 Advanced Micro Devices, Inc.
 * Copyright 2008 Red Hat Inc.
 * Copyright 2009 Jerome Glisse.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors: Dave Airlie
 */
#ifndef __RADEON_ASIC_H__
#define __RADEON_ASIC_H__

/*
 * Clock-control helpers shared by the per-ASIC callback tables below.
 * Two families exist: "legacy" (pre-ATOM hardware) and "atom"
 * (ATOM-BIOS-based hardware).  Note there is no legacy memory-clock
 * setter — the legacy tables below set .set_memory_clock = NULL.
 */
uint32_t radeon_legacy_get_engine_clock(struct radeon_device *rdev);
void radeon_legacy_set_engine_clock(struct radeon_device *rdev, uint32_t eng_clock);
uint32_t radeon_legacy_get_memory_clock(struct radeon_device *rdev);
void radeon_legacy_set_clock_gating(struct radeon_device *rdev, int enable);

uint32_t radeon_atom_get_engine_clock(struct radeon_device *rdev);
void radeon_atom_set_engine_clock(struct radeon_device *rdev, uint32_t eng_clock);
uint32_t radeon_atom_get_memory_clock(struct radeon_device *rdev);
void radeon_atom_set_memory_clock(struct radeon_device *rdev, uint32_t mem_clock);
void radeon_atom_set_clock_gating(struct radeon_device *rdev, int enable);
/* r100,rv100,rs100,rv200,rs200 */
/* Hooks implemented by the r100-generation code (r100,rv100,rs100,rv200,rs200). */
extern int r100_init(struct radeon_device *rdev);
extern void r100_fini(struct radeon_device *rdev);
extern int r100_suspend(struct radeon_device *rdev);
extern int r100_resume(struct radeon_device *rdev);
/* MMIO register accessors */
uint32_t r100_mm_rreg(struct radeon_device *rdev, uint32_t reg);
void r100_mm_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
void r100_vga_set_state(struct radeon_device *rdev, bool state);
int r100_gpu_reset(struct radeon_device *rdev);
u32 r100_get_vblank_counter(struct radeon_device *rdev, int crtc);
/* on-chip PCI GART */
void r100_pci_gart_tlb_flush(struct radeon_device *rdev);
int r100_pci_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr);
/* command processor / ring */
void r100_cp_commit(struct radeon_device *rdev);
void r100_ring_start(struct radeon_device *rdev);
int r100_irq_set(struct radeon_device *rdev);
int r100_irq_process(struct radeon_device *rdev);
void r100_fence_ring_emit(struct radeon_device *rdev,
			  struct radeon_fence *fence);
int r100_cs_parse(struct radeon_cs_parser *p);
/* PLL register accessors */
void r100_pll_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
uint32_t r100_pll_rreg(struct radeon_device *rdev, uint32_t reg);
/* NOTE(review): the middle parameter lines of r100_copy_blit (offsets,
 * page count) appear truncated in this extract — restore from full source. */
int r100_copy_blit(struct radeon_device *rdev,
		   struct radeon_fence *fence);
/* surface registers (tiling) */
int r100_set_surface_reg(struct radeon_device *rdev, int reg,
			 uint32_t tiling_flags, uint32_t pitch,
			 uint32_t offset, uint32_t obj_size);
int r100_clear_surface_reg(struct radeon_device *rdev, int reg);
void r100_bandwidth_update(struct radeon_device *rdev);
void r100_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib);
int r100_ring_test(struct radeon_device *rdev);
/* hotplug detect */
void r100_hpd_init(struct radeon_device *rdev);
void r100_hpd_fini(struct radeon_device *rdev);
bool r100_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd);
void r100_hpd_set_polarity(struct radeon_device *rdev,
			   enum radeon_hpd_id hpd);
/*
 * Callback table for the r100 family — the baseline every later table
 * varies from.  Uses the on-chip PCI GART, legacy clock helpers and the
 * blitter for copies.
 * NOTE(review): this extract has dropped lines — no .init/.fini members
 * or closing brace are visible here (evergreen_asic below shows them) —
 * verify against the complete source.
 */
static struct radeon_asic r100_asic = {
	.suspend = &r100_suspend,
	.resume = &r100_resume,
	.vga_set_state = &r100_vga_set_state,
	.gpu_reset = &r100_gpu_reset,
	.gart_tlb_flush = &r100_pci_gart_tlb_flush,
	.gart_set_page = &r100_pci_gart_set_page,
	.cp_commit = &r100_cp_commit,
	.ring_start = &r100_ring_start,
	.ring_test = &r100_ring_test,
	.ring_ib_execute = &r100_ring_ib_execute,
	.irq_set = &r100_irq_set,
	.irq_process = &r100_irq_process,
	.get_vblank_counter = &r100_get_vblank_counter,
	.fence_ring_emit = &r100_fence_ring_emit,
	.cs_parse = &r100_cs_parse,
	.copy_blit = &r100_copy_blit,
	/* generic copy uses the blitter */
	.copy = &r100_copy_blit,
	.get_engine_clock = &radeon_legacy_get_engine_clock,
	.set_engine_clock = &radeon_legacy_set_engine_clock,
	.get_memory_clock = &radeon_legacy_get_memory_clock,
	/* no legacy memory-clock setter is declared above */
	.set_memory_clock = NULL,
	.get_pcie_lanes = NULL,
	.set_pcie_lanes = NULL,
	.set_clock_gating = &radeon_legacy_set_clock_gating,
	.set_surface_reg = r100_set_surface_reg,
	.clear_surface_reg = r100_clear_surface_reg,
	.bandwidth_update = &r100_bandwidth_update,
	.hpd_init = &r100_hpd_init,
	.hpd_fini = &r100_hpd_fini,
	.hpd_sense = &r100_hpd_sense,
	.hpd_set_polarity = &r100_hpd_set_polarity,
	.ioctl_wait_idle = NULL,
/* r200,rv250,rs300,rv280 */
/* NOTE(review): r200_copy_dma's middle parameter lines appear truncated
 * in this extract — restore from full source. */
extern int r200_copy_dma(struct radeon_device *rdev,
			 struct radeon_fence *fence);
/*
 * r200 family table — same hooks as r100_asic plus a DMA copy callback;
 * the generic .copy still prefers the blitter.
 * NOTE(review): unlike r100_asic there is no explicit
 * .get_pcie_lanes = NULL entry; harmless (unnamed designated-initializer
 * members are zeroed) but inconsistent with the neighbouring tables.
 */
static struct radeon_asic r200_asic = {
	.suspend = &r100_suspend,
	.resume = &r100_resume,
	.vga_set_state = &r100_vga_set_state,
	.gpu_reset = &r100_gpu_reset,
	.gart_tlb_flush = &r100_pci_gart_tlb_flush,
	.gart_set_page = &r100_pci_gart_set_page,
	.cp_commit = &r100_cp_commit,
	.ring_start = &r100_ring_start,
	.ring_test = &r100_ring_test,
	.ring_ib_execute = &r100_ring_ib_execute,
	.irq_set = &r100_irq_set,
	.irq_process = &r100_irq_process,
	.get_vblank_counter = &r100_get_vblank_counter,
	.fence_ring_emit = &r100_fence_ring_emit,
	.cs_parse = &r100_cs_parse,
	.copy_blit = &r100_copy_blit,
	.copy_dma = &r200_copy_dma,
	.copy = &r100_copy_blit,
	.get_engine_clock = &radeon_legacy_get_engine_clock,
	.set_engine_clock = &radeon_legacy_set_engine_clock,
	.get_memory_clock = &radeon_legacy_get_memory_clock,
	.set_memory_clock = NULL,
	.set_pcie_lanes = NULL,
	.set_clock_gating = &radeon_legacy_set_clock_gating,
	.set_surface_reg = r100_set_surface_reg,
	.clear_surface_reg = r100_clear_surface_reg,
	.bandwidth_update = &r100_bandwidth_update,
	.hpd_init = &r100_hpd_init,
	.hpd_fini = &r100_hpd_fini,
	.hpd_sense = &r100_hpd_sense,
	.hpd_set_polarity = &r100_hpd_set_polarity,
	.ioctl_wait_idle = NULL,
/* r300,r350,rv350,rv380 */
/* Hooks implemented by the r300-generation code (r300,r350,rv350,rv380). */
extern int r300_init(struct radeon_device *rdev);
extern void r300_fini(struct radeon_device *rdev);
extern int r300_suspend(struct radeon_device *rdev);
extern int r300_resume(struct radeon_device *rdev);
extern int r300_gpu_reset(struct radeon_device *rdev);
extern void r300_ring_start(struct radeon_device *rdev);
extern void r300_fence_ring_emit(struct radeon_device *rdev,
				 struct radeon_fence *fence);
extern int r300_cs_parse(struct radeon_cs_parser *p);
/* rv370 PCIE GART and PCIE register/lane helpers (used by PCIE variants) */
extern void rv370_pcie_gart_tlb_flush(struct radeon_device *rdev);
extern int rv370_pcie_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr);
extern uint32_t rv370_pcie_rreg(struct radeon_device *rdev, uint32_t reg);
extern void rv370_pcie_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
extern void rv370_set_pcie_lanes(struct radeon_device *rdev, int lanes);
extern int rv370_get_pcie_lanes(struct radeon_device *rdev);
/*
 * r300 family table for parts using the on-chip PCI GART (AGP/PCI
 * boards); PCIE boards use r300_asic_pcie below.  Differs from
 * r100_asic in the r300 core hooks (reset, ring start, fence, CS
 * parser) and the rv370 PCIE lane callbacks.
 * NOTE(review): .init/.fini members and the closing brace are missing
 * from this extract.
 */
static struct radeon_asic r300_asic = {
	.suspend = &r300_suspend,
	.resume = &r300_resume,
	.vga_set_state = &r100_vga_set_state,
	.gpu_reset = &r300_gpu_reset,
	.gart_tlb_flush = &r100_pci_gart_tlb_flush,
	.gart_set_page = &r100_pci_gart_set_page,
	.cp_commit = &r100_cp_commit,
	.ring_start = &r300_ring_start,
	.ring_test = &r100_ring_test,
	.ring_ib_execute = &r100_ring_ib_execute,
	.irq_set = &r100_irq_set,
	.irq_process = &r100_irq_process,
	.get_vblank_counter = &r100_get_vblank_counter,
	.fence_ring_emit = &r300_fence_ring_emit,
	.cs_parse = &r300_cs_parse,
	.copy_blit = &r100_copy_blit,
	.copy_dma = &r200_copy_dma,
	.copy = &r100_copy_blit,
	.get_engine_clock = &radeon_legacy_get_engine_clock,
	.set_engine_clock = &radeon_legacy_set_engine_clock,
	.get_memory_clock = &radeon_legacy_get_memory_clock,
	.set_memory_clock = NULL,
	.get_pcie_lanes = &rv370_get_pcie_lanes,
	.set_pcie_lanes = &rv370_set_pcie_lanes,
	.set_clock_gating = &radeon_legacy_set_clock_gating,
	.set_surface_reg = r100_set_surface_reg,
	.clear_surface_reg = r100_clear_surface_reg,
	.bandwidth_update = &r100_bandwidth_update,
	.hpd_init = &r100_hpd_init,
	.hpd_fini = &r100_hpd_fini,
	.hpd_sense = &r100_hpd_sense,
	.hpd_set_polarity = &r100_hpd_set_polarity,
	.ioctl_wait_idle = NULL,
228 static struct radeon_asic r300_asic_pcie = {
231 .suspend = &r300_suspend,
232 .resume = &r300_resume,
233 .vga_set_state = &r100_vga_set_state,
234 .gpu_reset = &r300_gpu_reset,
235 .gart_tlb_flush = &rv370_pcie_gart_tlb_flush,
236 .gart_set_page = &rv370_pcie_gart_set_page,
237 .cp_commit = &r100_cp_commit,
238 .ring_start = &r300_ring_start,
239 .ring_test = &r100_ring_test,
240 .ring_ib_execute = &r100_ring_ib_execute,
241 .irq_set = &r100_irq_set,
242 .irq_process = &r100_irq_process,
243 .get_vblank_counter = &r100_get_vblank_counter,
244 .fence_ring_emit = &r300_fence_ring_emit,
245 .cs_parse = &r300_cs_parse,
246 .copy_blit = &r100_copy_blit,
247 .copy_dma = &r200_copy_dma,
248 .copy = &r100_copy_blit,
249 .get_engine_clock = &radeon_legacy_get_engine_clock,
250 .set_engine_clock = &radeon_legacy_set_engine_clock,
251 .get_memory_clock = &radeon_legacy_get_memory_clock,
252 .set_memory_clock = NULL,
253 .set_pcie_lanes = &rv370_set_pcie_lanes,
254 .set_clock_gating = &radeon_legacy_set_clock_gating,
255 .set_surface_reg = r100_set_surface_reg,
256 .clear_surface_reg = r100_clear_surface_reg,
257 .bandwidth_update = &r100_bandwidth_update,
258 .hpd_init = &r100_hpd_init,
259 .hpd_fini = &r100_hpd_fini,
260 .hpd_sense = &r100_hpd_sense,
261 .hpd_set_polarity = &r100_hpd_set_polarity,
262 .ioctl_wait_idle = NULL,
/* r420 family entry points. */
extern int r420_init(struct radeon_device *rdev);
extern void r420_fini(struct radeon_device *rdev);
extern int r420_suspend(struct radeon_device *rdev);
extern int r420_resume(struct radeon_device *rdev);
/*
 * r420 family table — r300 core hooks over the rv370 PCIE GART; first
 * table in this file to use the ATOM clock helpers (including a real
 * memory-clock setter).
 * NOTE(review): .init/.fini members and the closing brace are missing
 * from this extract.
 */
static struct radeon_asic r420_asic = {
	.suspend = &r420_suspend,
	.resume = &r420_resume,
	.vga_set_state = &r100_vga_set_state,
	.gpu_reset = &r300_gpu_reset,
	.gart_tlb_flush = &rv370_pcie_gart_tlb_flush,
	.gart_set_page = &rv370_pcie_gart_set_page,
	.cp_commit = &r100_cp_commit,
	.ring_start = &r300_ring_start,
	.ring_test = &r100_ring_test,
	.ring_ib_execute = &r100_ring_ib_execute,
	.irq_set = &r100_irq_set,
	.irq_process = &r100_irq_process,
	.get_vblank_counter = &r100_get_vblank_counter,
	.fence_ring_emit = &r300_fence_ring_emit,
	.cs_parse = &r300_cs_parse,
	.copy_blit = &r100_copy_blit,
	.copy_dma = &r200_copy_dma,
	.copy = &r100_copy_blit,
	.get_engine_clock = &radeon_atom_get_engine_clock,
	.set_engine_clock = &radeon_atom_set_engine_clock,
	.get_memory_clock = &radeon_atom_get_memory_clock,
	.set_memory_clock = &radeon_atom_set_memory_clock,
	.get_pcie_lanes = &rv370_get_pcie_lanes,
	.set_pcie_lanes = &rv370_set_pcie_lanes,
	.set_clock_gating = &radeon_atom_set_clock_gating,
	.set_surface_reg = r100_set_surface_reg,
	.clear_surface_reg = r100_clear_surface_reg,
	.bandwidth_update = &r100_bandwidth_update,
	.hpd_init = &r100_hpd_init,
	.hpd_fini = &r100_hpd_fini,
	.hpd_sense = &r100_hpd_sense,
	.hpd_set_polarity = &r100_hpd_set_polarity,
	.ioctl_wait_idle = NULL,
/* rs400 family entry points, including its own GART and MC accessors. */
extern int rs400_init(struct radeon_device *rdev);
extern void rs400_fini(struct radeon_device *rdev);
extern int rs400_suspend(struct radeon_device *rdev);
extern int rs400_resume(struct radeon_device *rdev);
void rs400_gart_tlb_flush(struct radeon_device *rdev);
int rs400_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr);
uint32_t rs400_mc_rreg(struct radeon_device *rdev, uint32_t reg);
void rs400_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
/*
 * rs400 family table — r300 core hooks with the rs400 GART and legacy
 * clock helpers.
 * NOTE(review): .init/.fini members and the closing brace are missing
 * from this extract.
 */
static struct radeon_asic rs400_asic = {
	.suspend = &rs400_suspend,
	.resume = &rs400_resume,
	.vga_set_state = &r100_vga_set_state,
	.gpu_reset = &r300_gpu_reset,
	.gart_tlb_flush = &rs400_gart_tlb_flush,
	.gart_set_page = &rs400_gart_set_page,
	.cp_commit = &r100_cp_commit,
	.ring_start = &r300_ring_start,
	.ring_test = &r100_ring_test,
	.ring_ib_execute = &r100_ring_ib_execute,
	.irq_set = &r100_irq_set,
	.irq_process = &r100_irq_process,
	.get_vblank_counter = &r100_get_vblank_counter,
	.fence_ring_emit = &r300_fence_ring_emit,
	.cs_parse = &r300_cs_parse,
	.copy_blit = &r100_copy_blit,
	.copy_dma = &r200_copy_dma,
	.copy = &r100_copy_blit,
	.get_engine_clock = &radeon_legacy_get_engine_clock,
	.set_engine_clock = &radeon_legacy_set_engine_clock,
	.get_memory_clock = &radeon_legacy_get_memory_clock,
	.set_memory_clock = NULL,
	.get_pcie_lanes = NULL,
	.set_pcie_lanes = NULL,
	.set_clock_gating = &radeon_legacy_set_clock_gating,
	.set_surface_reg = r100_set_surface_reg,
	.clear_surface_reg = r100_clear_surface_reg,
	.bandwidth_update = &r100_bandwidth_update,
	.hpd_init = &r100_hpd_init,
	.hpd_fini = &r100_hpd_fini,
	.hpd_sense = &r100_hpd_sense,
	.hpd_set_polarity = &r100_hpd_set_polarity,
	.ioctl_wait_idle = NULL,
/* rs600 family entry points: own IRQ/vblank handling, GART, MC access,
 * bandwidth and hotplug code — reused by several later tables. */
extern int rs600_init(struct radeon_device *rdev);
extern void rs600_fini(struct radeon_device *rdev);
extern int rs600_suspend(struct radeon_device *rdev);
extern int rs600_resume(struct radeon_device *rdev);
int rs600_irq_set(struct radeon_device *rdev);
int rs600_irq_process(struct radeon_device *rdev);
u32 rs600_get_vblank_counter(struct radeon_device *rdev, int crtc);
void rs600_gart_tlb_flush(struct radeon_device *rdev);
int rs600_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr);
uint32_t rs600_mc_rreg(struct radeon_device *rdev, uint32_t reg);
void rs600_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
void rs600_bandwidth_update(struct radeon_device *rdev);
void rs600_hpd_init(struct radeon_device *rdev);
void rs600_hpd_fini(struct radeon_device *rdev);
bool rs600_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd);
void rs600_hpd_set_polarity(struct radeon_device *rdev,
			    enum radeon_hpd_id hpd);
/*
 * rs600 family table — r300 core hooks with rs600 GART, IRQ, vblank,
 * bandwidth and hotplug callbacks, and ATOM clock helpers.
 * NOTE(review): unlike the neighbouring tables there are no
 * .set_surface_reg/.clear_surface_reg entries here — confirm this is
 * intentional and not a dropped line in this extract.
 * NOTE(review): .init/.fini members and the closing brace are also
 * missing from this extract.
 */
static struct radeon_asic rs600_asic = {
	.suspend = &rs600_suspend,
	.resume = &rs600_resume,
	.vga_set_state = &r100_vga_set_state,
	.gpu_reset = &r300_gpu_reset,
	.gart_tlb_flush = &rs600_gart_tlb_flush,
	.gart_set_page = &rs600_gart_set_page,
	.cp_commit = &r100_cp_commit,
	.ring_start = &r300_ring_start,
	.ring_test = &r100_ring_test,
	.ring_ib_execute = &r100_ring_ib_execute,
	.irq_set = &rs600_irq_set,
	.irq_process = &rs600_irq_process,
	.get_vblank_counter = &rs600_get_vblank_counter,
	.fence_ring_emit = &r300_fence_ring_emit,
	.cs_parse = &r300_cs_parse,
	.copy_blit = &r100_copy_blit,
	.copy_dma = &r200_copy_dma,
	.copy = &r100_copy_blit,
	.get_engine_clock = &radeon_atom_get_engine_clock,
	.set_engine_clock = &radeon_atom_set_engine_clock,
	.get_memory_clock = &radeon_atom_get_memory_clock,
	.set_memory_clock = &radeon_atom_set_memory_clock,
	.get_pcie_lanes = NULL,
	.set_pcie_lanes = NULL,
	.set_clock_gating = &radeon_atom_set_clock_gating,
	.bandwidth_update = &rs600_bandwidth_update,
	.hpd_init = &rs600_hpd_init,
	.hpd_fini = &rs600_hpd_fini,
	.hpd_sense = &rs600_hpd_sense,
	.hpd_set_polarity = &rs600_hpd_set_polarity,
	.ioctl_wait_idle = NULL,
/* rs690 family entry points. */
int rs690_init(struct radeon_device *rdev);
void rs690_fini(struct radeon_device *rdev);
int rs690_resume(struct radeon_device *rdev);
int rs690_suspend(struct radeon_device *rdev);
uint32_t rs690_mc_rreg(struct radeon_device *rdev, uint32_t reg);
void rs690_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
void rs690_bandwidth_update(struct radeon_device *rdev);
/*
 * rs690 family table — mixes the rs400 GART with the rs600 IRQ/vblank
 * and hotplug callbacks.  This is the only table here whose generic
 * .copy prefers the DMA engine over the blitter.
 * NOTE(review): .init/.fini members and the closing brace are missing
 * from this extract.
 */
static struct radeon_asic rs690_asic = {
	.suspend = &rs690_suspend,
	.resume = &rs690_resume,
	.vga_set_state = &r100_vga_set_state,
	.gpu_reset = &r300_gpu_reset,
	.gart_tlb_flush = &rs400_gart_tlb_flush,
	.gart_set_page = &rs400_gart_set_page,
	.cp_commit = &r100_cp_commit,
	.ring_start = &r300_ring_start,
	.ring_test = &r100_ring_test,
	.ring_ib_execute = &r100_ring_ib_execute,
	.irq_set = &rs600_irq_set,
	.irq_process = &rs600_irq_process,
	.get_vblank_counter = &rs600_get_vblank_counter,
	.fence_ring_emit = &r300_fence_ring_emit,
	.cs_parse = &r300_cs_parse,
	.copy_blit = &r100_copy_blit,
	.copy_dma = &r200_copy_dma,
	/* generic copy deliberately uses DMA on this IGP */
	.copy = &r200_copy_dma,
	.get_engine_clock = &radeon_atom_get_engine_clock,
	.set_engine_clock = &radeon_atom_set_engine_clock,
	.get_memory_clock = &radeon_atom_get_memory_clock,
	.set_memory_clock = &radeon_atom_set_memory_clock,
	.get_pcie_lanes = NULL,
	.set_pcie_lanes = NULL,
	.set_clock_gating = &radeon_atom_set_clock_gating,
	.set_surface_reg = r100_set_surface_reg,
	.clear_surface_reg = r100_clear_surface_reg,
	.bandwidth_update = &rs690_bandwidth_update,
	.hpd_init = &rs600_hpd_init,
	.hpd_fini = &rs600_hpd_fini,
	.hpd_sense = &rs600_hpd_sense,
	.hpd_set_polarity = &rs600_hpd_set_polarity,
	.ioctl_wait_idle = NULL,
/* rv515 family entry points, including PCIE and MC register accessors. */
int rv515_init(struct radeon_device *rdev);
void rv515_fini(struct radeon_device *rdev);
int rv515_gpu_reset(struct radeon_device *rdev);
uint32_t rv515_mc_rreg(struct radeon_device *rdev, uint32_t reg);
void rv515_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
void rv515_ring_start(struct radeon_device *rdev);
uint32_t rv515_pcie_rreg(struct radeon_device *rdev, uint32_t reg);
void rv515_pcie_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
void rv515_bandwidth_update(struct radeon_device *rdev);
int rv515_resume(struct radeon_device *rdev);
int rv515_suspend(struct radeon_device *rdev);
/*
 * rv515 family table — rv515 reset/ring-start/bandwidth over the rv370
 * PCIE GART, with rs600 IRQ/vblank/hotplug callbacks and ATOM clocks.
 * NOTE(review): .init/.fini members and the closing brace are missing
 * from this extract.
 */
static struct radeon_asic rv515_asic = {
	.suspend = &rv515_suspend,
	.resume = &rv515_resume,
	.vga_set_state = &r100_vga_set_state,
	.gpu_reset = &rv515_gpu_reset,
	.gart_tlb_flush = &rv370_pcie_gart_tlb_flush,
	.gart_set_page = &rv370_pcie_gart_set_page,
	.cp_commit = &r100_cp_commit,
	.ring_start = &rv515_ring_start,
	.ring_test = &r100_ring_test,
	.ring_ib_execute = &r100_ring_ib_execute,
	.irq_set = &rs600_irq_set,
	.irq_process = &rs600_irq_process,
	.get_vblank_counter = &rs600_get_vblank_counter,
	.fence_ring_emit = &r300_fence_ring_emit,
	.cs_parse = &r300_cs_parse,
	.copy_blit = &r100_copy_blit,
	.copy_dma = &r200_copy_dma,
	.copy = &r100_copy_blit,
	.get_engine_clock = &radeon_atom_get_engine_clock,
	.set_engine_clock = &radeon_atom_set_engine_clock,
	.get_memory_clock = &radeon_atom_get_memory_clock,
	.set_memory_clock = &radeon_atom_set_memory_clock,
	.get_pcie_lanes = &rv370_get_pcie_lanes,
	.set_pcie_lanes = &rv370_set_pcie_lanes,
	.set_clock_gating = &radeon_atom_set_clock_gating,
	.set_surface_reg = r100_set_surface_reg,
	.clear_surface_reg = r100_clear_surface_reg,
	.bandwidth_update = &rv515_bandwidth_update,
	.hpd_init = &rs600_hpd_init,
	.hpd_fini = &rs600_hpd_fini,
	.hpd_sense = &rs600_hpd_sense,
	.hpd_set_polarity = &rs600_hpd_set_polarity,
	.ioctl_wait_idle = NULL,
/* r520,rv530,rv560,rv570,r580 */
/* r520 family entry points: only init/resume are r520-specific; suspend
 * (and everything else below) is reused from rv515. */
int r520_init(struct radeon_device *rdev);
int r520_resume(struct radeon_device *rdev);
/*
 * r520 family table — identical to rv515_asic except for the r520
 * .resume hook.
 * NOTE(review): .init/.fini members and the closing brace are missing
 * from this extract.
 */
static struct radeon_asic r520_asic = {
	.suspend = &rv515_suspend,
	.resume = &r520_resume,
	.vga_set_state = &r100_vga_set_state,
	.gpu_reset = &rv515_gpu_reset,
	.gart_tlb_flush = &rv370_pcie_gart_tlb_flush,
	.gart_set_page = &rv370_pcie_gart_set_page,
	.cp_commit = &r100_cp_commit,
	.ring_start = &rv515_ring_start,
	.ring_test = &r100_ring_test,
	.ring_ib_execute = &r100_ring_ib_execute,
	.irq_set = &rs600_irq_set,
	.irq_process = &rs600_irq_process,
	.get_vblank_counter = &rs600_get_vblank_counter,
	.fence_ring_emit = &r300_fence_ring_emit,
	.cs_parse = &r300_cs_parse,
	.copy_blit = &r100_copy_blit,
	.copy_dma = &r200_copy_dma,
	.copy = &r100_copy_blit,
	.get_engine_clock = &radeon_atom_get_engine_clock,
	.set_engine_clock = &radeon_atom_set_engine_clock,
	.get_memory_clock = &radeon_atom_get_memory_clock,
	.set_memory_clock = &radeon_atom_set_memory_clock,
	.get_pcie_lanes = &rv370_get_pcie_lanes,
	.set_pcie_lanes = &rv370_set_pcie_lanes,
	.set_clock_gating = &radeon_atom_set_clock_gating,
	.set_surface_reg = r100_set_surface_reg,
	.clear_surface_reg = r100_clear_surface_reg,
	.bandwidth_update = &rv515_bandwidth_update,
	.hpd_init = &rs600_hpd_init,
	.hpd_fini = &rs600_hpd_fini,
	.hpd_sense = &rs600_hpd_sense,
	.hpd_set_polarity = &rs600_hpd_set_polarity,
	.ioctl_wait_idle = NULL,
/* r600,rv610,rv630,rv620,rv635,rv670,rs780,rs880 */
/* r600-generation entry points (r600,rv610,rv630,rv620,rv635,rv670,rs780,rs880). */
int r600_init(struct radeon_device *rdev);
void r600_fini(struct radeon_device *rdev);
int r600_suspend(struct radeon_device *rdev);
int r600_resume(struct radeon_device *rdev);
void r600_vga_set_state(struct radeon_device *rdev, bool state);
/* writeback buffer */
int r600_wb_init(struct radeon_device *rdev);
void r600_wb_fini(struct radeon_device *rdev);
void r600_cp_commit(struct radeon_device *rdev);
void r600_pcie_gart_tlb_flush(struct radeon_device *rdev);
/* PCIE port register accessors */
uint32_t r600_pciep_rreg(struct radeon_device *rdev, uint32_t reg);
void r600_pciep_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
int r600_cs_parse(struct radeon_cs_parser *p);
void r600_fence_ring_emit(struct radeon_device *rdev,
			  struct radeon_fence *fence);
/* NOTE(review): r600_copy_dma's middle parameter lines appear truncated
 * in this extract — restore from full source.  (The r600 tables below
 * route .copy_dma to r600_copy_blit anyway.) */
int r600_copy_dma(struct radeon_device *rdev,
		  struct radeon_fence *fence);
int r600_irq_process(struct radeon_device *rdev);
int r600_irq_set(struct radeon_device *rdev);
int r600_gpu_reset(struct radeon_device *rdev);
int r600_set_surface_reg(struct radeon_device *rdev, int reg,
			 uint32_t tiling_flags, uint32_t pitch,
			 uint32_t offset, uint32_t obj_size);
int r600_clear_surface_reg(struct radeon_device *rdev, int reg);
void r600_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib);
int r600_ring_test(struct radeon_device *rdev);
int r600_copy_blit(struct radeon_device *rdev,
		   uint64_t src_offset, uint64_t dst_offset,
		   unsigned num_pages, struct radeon_fence *fence);
void r600_hpd_init(struct radeon_device *rdev);
void r600_hpd_fini(struct radeon_device *rdev);
bool r600_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd);
void r600_hpd_set_polarity(struct radeon_device *rdev,
			   enum radeon_hpd_id hpd);
extern void r600_ioctl_wait_idle(struct radeon_device *rdev, struct radeon_bo *bo);
/*
 * r600 family table — all-new core hooks; no .ring_start entry, GART
 * page setup reuses rs600_gart_set_page, and all three copy slots go
 * through the blitter (no separate DMA path is wired up).  First table
 * to install .ioctl_wait_idle.
 * NOTE(review): .init/.fini members and the closing brace are missing
 * from this extract.
 */
static struct radeon_asic r600_asic = {
	.suspend = &r600_suspend,
	.resume = &r600_resume,
	.cp_commit = &r600_cp_commit,
	.vga_set_state = &r600_vga_set_state,
	.gpu_reset = &r600_gpu_reset,
	.gart_tlb_flush = &r600_pcie_gart_tlb_flush,
	.gart_set_page = &rs600_gart_set_page,
	.ring_test = &r600_ring_test,
	.ring_ib_execute = &r600_ring_ib_execute,
	.irq_set = &r600_irq_set,
	.irq_process = &r600_irq_process,
	.get_vblank_counter = &rs600_get_vblank_counter,
	.fence_ring_emit = &r600_fence_ring_emit,
	.cs_parse = &r600_cs_parse,
	.copy_blit = &r600_copy_blit,
	/* DMA slot intentionally routed to the blitter */
	.copy_dma = &r600_copy_blit,
	.copy = &r600_copy_blit,
	.get_engine_clock = &radeon_atom_get_engine_clock,
	.set_engine_clock = &radeon_atom_set_engine_clock,
	.get_memory_clock = &radeon_atom_get_memory_clock,
	.set_memory_clock = &radeon_atom_set_memory_clock,
	.get_pcie_lanes = &rv370_get_pcie_lanes,
	.set_pcie_lanes = NULL,
	.set_clock_gating = NULL,
	.set_surface_reg = r600_set_surface_reg,
	.clear_surface_reg = r600_clear_surface_reg,
	.bandwidth_update = &rv515_bandwidth_update,
	.hpd_init = &r600_hpd_init,
	.hpd_fini = &r600_hpd_fini,
	.hpd_sense = &r600_hpd_sense,
	.hpd_set_polarity = &r600_hpd_set_polarity,
	.ioctl_wait_idle = r600_ioctl_wait_idle,
/* rv770,rv730,rv710,rv740 */
/* rv770 family entry points; everything not listed here reuses r600 code. */
int rv770_init(struct radeon_device *rdev);
void rv770_fini(struct radeon_device *rdev);
int rv770_suspend(struct radeon_device *rdev);
int rv770_resume(struct radeon_device *rdev);
int rv770_gpu_reset(struct radeon_device *rdev);
/*
 * rv770 family table — r600_asic with rv770 lifecycle/reset hooks and,
 * unlike r600_asic, the ATOM clock-gating callback installed.
 * NOTE(review): .init/.fini members and the closing brace are missing
 * from this extract.
 */
static struct radeon_asic rv770_asic = {
	.suspend = &rv770_suspend,
	.resume = &rv770_resume,
	.cp_commit = &r600_cp_commit,
	.gpu_reset = &rv770_gpu_reset,
	.vga_set_state = &r600_vga_set_state,
	.gart_tlb_flush = &r600_pcie_gart_tlb_flush,
	.gart_set_page = &rs600_gart_set_page,
	.ring_test = &r600_ring_test,
	.ring_ib_execute = &r600_ring_ib_execute,
	.irq_set = &r600_irq_set,
	.irq_process = &r600_irq_process,
	.get_vblank_counter = &rs600_get_vblank_counter,
	.fence_ring_emit = &r600_fence_ring_emit,
	.cs_parse = &r600_cs_parse,
	.copy_blit = &r600_copy_blit,
	.copy_dma = &r600_copy_blit,
	.copy = &r600_copy_blit,
	.get_engine_clock = &radeon_atom_get_engine_clock,
	.set_engine_clock = &radeon_atom_set_engine_clock,
	.get_memory_clock = &radeon_atom_get_memory_clock,
	.set_memory_clock = &radeon_atom_set_memory_clock,
	.get_pcie_lanes = &rv370_get_pcie_lanes,
	.set_pcie_lanes = NULL,
	.set_clock_gating = &radeon_atom_set_clock_gating,
	.set_surface_reg = r600_set_surface_reg,
	.clear_surface_reg = r600_clear_surface_reg,
	.bandwidth_update = &rv515_bandwidth_update,
	.hpd_init = &r600_hpd_init,
	.hpd_fini = &r600_hpd_fini,
	.hpd_sense = &r600_hpd_sense,
	.hpd_set_polarity = &r600_hpd_set_polarity,
	.ioctl_wait_idle = r600_ioctl_wait_idle,
/* evergreen entry points. */
int evergreen_init(struct radeon_device *rdev);
void evergreen_fini(struct radeon_device *rdev);
int evergreen_suspend(struct radeon_device *rdev);
int evergreen_resume(struct radeon_device *rdev);
int evergreen_gpu_reset(struct radeon_device *rdev);
void evergreen_bandwidth_update(struct radeon_device *rdev);
void evergreen_hpd_init(struct radeon_device *rdev);
void evergreen_hpd_fini(struct radeon_device *rdev);
bool evergreen_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd);
void evergreen_hpd_set_polarity(struct radeon_device *rdev,
				enum radeon_hpd_id hpd);
/*
 * evergreen bring-up table: only mode-setting-adjacent hooks (reset,
 * GART, clocks, surface regs, bandwidth, hotplug) are wired; the
 * acceleration callbacks are still NULL.
 * NOTE(review): this extract appears to have dropped several member
 * lines (e.g. between .resume/.gpu_reset and around the ring/irq
 * NULLs) and the closing brace — verify against the full source.
 */
static struct radeon_asic evergreen_asic = {
	.init = &evergreen_init,
	.fini = &evergreen_fini,
	.suspend = &evergreen_suspend,
	.resume = &evergreen_resume,
	.gpu_reset = &evergreen_gpu_reset,
	.vga_set_state = &r600_vga_set_state,
	.gart_tlb_flush = &r600_pcie_gart_tlb_flush,
	.gart_set_page = &rs600_gart_set_page,
	/* acceleration not implemented yet */
	.ring_ib_execute = NULL,
	.get_vblank_counter = NULL,
	.fence_ring_emit = NULL,
	.get_engine_clock = &radeon_atom_get_engine_clock,
	.set_engine_clock = &radeon_atom_set_engine_clock,
	.get_memory_clock = &radeon_atom_get_memory_clock,
	.set_memory_clock = &radeon_atom_set_memory_clock,
	.set_pcie_lanes = NULL,
	.set_clock_gating = NULL,
	.set_surface_reg = r600_set_surface_reg,
	.clear_surface_reg = r600_clear_surface_reg,
	.bandwidth_update = &evergreen_bandwidth_update,
	.hpd_init = &evergreen_hpd_init,
	.hpd_fini = &evergreen_hpd_fini,
	.hpd_sense = &evergreen_hpd_sense,
	.hpd_set_polarity = &evergreen_hpd_set_polarity,