/*
 * Copyright 2008 Advanced Micro Devices, Inc.
 * Copyright 2008 Red Hat Inc.
 * Copyright 2009 Jerome Glisse.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors: Dave Airlie
 *          Alex Deucher
 *          Jerome Glisse
 */
#ifndef __RADEON_ASIC_H__
#define __RADEON_ASIC_H__

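/*
 * This header declares the per-family hook implementations and gathers
 * them into the static struct radeon_asic dispatch tables below, one
 * table per generation of hardware.  Core code is expected to reach the
 * hooks indirectly through rdev->asic (for instance via the radeon_*()
 * wrapper macros in radeon.h), roughly along the lines of:
 *
 *      rdev->asic = &r300_asic;
 *      ...
 *      rdev->asic->gart_tlb_flush(rdev);
 *
 * (Illustrative sketch only; the wrapper names and the ASIC selection
 * logic live outside this file.)
 */
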
/*
 * common functions
 */
void radeon_legacy_set_engine_clock(struct radeon_device *rdev, uint32_t eng_clock);
void radeon_legacy_set_clock_gating(struct radeon_device *rdev, int enable);

void radeon_atom_set_engine_clock(struct radeon_device *rdev, uint32_t eng_clock);
void radeon_atom_set_memory_clock(struct radeon_device *rdev, uint32_t mem_clock);
void radeon_atom_set_clock_gating(struct radeon_device *rdev, int enable);
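/*
 * The radeon_legacy_* helpers program clocks through the pre-AtomBIOS
 * (COMBIOS-era) paths, while the radeon_atom_* helpers go through
 * AtomBIOS command tables; each ASIC table below plugs in whichever
 * flavour the chip supports, leaving unsupported hooks NULL.
 */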

/*
 * r100,rv100,rs100,rv200,rs200,r200,rv250,rs300,rv280
 */
int r100_init(struct radeon_device *rdev);
int r200_init(struct radeon_device *rdev);
uint32_t r100_mm_rreg(struct radeon_device *rdev, uint32_t reg);
void r100_mm_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
void r100_errata(struct radeon_device *rdev);
void r100_vram_info(struct radeon_device *rdev);
int r100_gpu_reset(struct radeon_device *rdev);
int r100_mc_init(struct radeon_device *rdev);
void r100_mc_fini(struct radeon_device *rdev);
u32 r100_get_vblank_counter(struct radeon_device *rdev, int crtc);
int r100_wb_init(struct radeon_device *rdev);
void r100_wb_fini(struct radeon_device *rdev);
int r100_pci_gart_init(struct radeon_device *rdev);
void r100_pci_gart_fini(struct radeon_device *rdev);
int r100_pci_gart_enable(struct radeon_device *rdev);
void r100_pci_gart_disable(struct radeon_device *rdev);
void r100_pci_gart_tlb_flush(struct radeon_device *rdev);
int r100_pci_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr);
int r100_cp_init(struct radeon_device *rdev, unsigned ring_size);
void r100_cp_fini(struct radeon_device *rdev);
void r100_cp_disable(struct radeon_device *rdev);
void r100_cp_commit(struct radeon_device *rdev);
void r100_ring_start(struct radeon_device *rdev);
int r100_irq_set(struct radeon_device *rdev);
int r100_irq_process(struct radeon_device *rdev);
void r100_fence_ring_emit(struct radeon_device *rdev,
                          struct radeon_fence *fence);
int r100_cs_parse(struct radeon_cs_parser *p);
void r100_pll_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
uint32_t r100_pll_rreg(struct radeon_device *rdev, uint32_t reg);
int r100_copy_blit(struct radeon_device *rdev,
                   uint64_t src_offset,
                   uint64_t dst_offset,
                   unsigned num_pages,
                   struct radeon_fence *fence);
int r100_set_surface_reg(struct radeon_device *rdev, int reg,
                         uint32_t tiling_flags, uint32_t pitch,
                         uint32_t offset, uint32_t obj_size);
int r100_clear_surface_reg(struct radeon_device *rdev, int reg);
void r100_bandwidth_update(struct radeon_device *rdev);
void r100_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib);
int r100_ib_test(struct radeon_device *rdev);
int r100_ring_test(struct radeon_device *rdev);

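/*
 * Hooks left NULL in a table are simply not wired up for that family:
 * in the r100 table, for example, no DMA copy path is provided and there
 * is no AtomBIOS memory-clock or PCIe-lane control, so .copy falls back
 * to the blit implementation and those clock hooks stay NULL.
 */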
static struct radeon_asic r100_asic = {
        .init = &r100_init,
        .errata = &r100_errata,
        .vram_info = &r100_vram_info,
        .gpu_reset = &r100_gpu_reset,
        .mc_init = &r100_mc_init,
        .mc_fini = &r100_mc_fini,
        .wb_init = &r100_wb_init,
        .wb_fini = &r100_wb_fini,
        .gart_init = &r100_pci_gart_init,
        .gart_fini = &r100_pci_gart_fini,
        .gart_enable = &r100_pci_gart_enable,
        .gart_disable = &r100_pci_gart_disable,
        .gart_tlb_flush = &r100_pci_gart_tlb_flush,
        .gart_set_page = &r100_pci_gart_set_page,
        .cp_init = &r100_cp_init,
        .cp_fini = &r100_cp_fini,
        .cp_disable = &r100_cp_disable,
        .cp_commit = &r100_cp_commit,
        .ring_start = &r100_ring_start,
        .ring_test = &r100_ring_test,
        .ring_ib_execute = &r100_ring_ib_execute,
        .ib_test = &r100_ib_test,
        .irq_set = &r100_irq_set,
        .irq_process = &r100_irq_process,
        .get_vblank_counter = &r100_get_vblank_counter,
        .fence_ring_emit = &r100_fence_ring_emit,
        .cs_parse = &r100_cs_parse,
        .copy_blit = &r100_copy_blit,
        .copy_dma = NULL,
        .copy = &r100_copy_blit,
        .set_engine_clock = &radeon_legacy_set_engine_clock,
        .set_memory_clock = NULL,
        .set_pcie_lanes = NULL,
        .set_clock_gating = &radeon_legacy_set_clock_gating,
        .set_surface_reg = r100_set_surface_reg,
        .clear_surface_reg = r100_clear_surface_reg,
        .bandwidth_update = &r100_bandwidth_update,
};


/*
 * r300,r350,rv350,rv380
 */
int r300_init(struct radeon_device *rdev);
void r300_errata(struct radeon_device *rdev);
void r300_vram_info(struct radeon_device *rdev);
int r300_gpu_reset(struct radeon_device *rdev);
int r300_mc_init(struct radeon_device *rdev);
void r300_mc_fini(struct radeon_device *rdev);
void r300_ring_start(struct radeon_device *rdev);
void r300_fence_ring_emit(struct radeon_device *rdev,
                          struct radeon_fence *fence);
int r300_cs_parse(struct radeon_cs_parser *p);
int rv370_pcie_gart_init(struct radeon_device *rdev);
void rv370_pcie_gart_fini(struct radeon_device *rdev);
int rv370_pcie_gart_enable(struct radeon_device *rdev);
void rv370_pcie_gart_disable(struct radeon_device *rdev);
void rv370_pcie_gart_tlb_flush(struct radeon_device *rdev);
int rv370_pcie_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr);
uint32_t rv370_pcie_rreg(struct radeon_device *rdev, uint32_t reg);
void rv370_pcie_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
void rv370_set_pcie_lanes(struct radeon_device *rdev, int lanes);
int r300_copy_dma(struct radeon_device *rdev,
                  uint64_t src_offset,
                  uint64_t dst_offset,
                  unsigned num_pages,
                  struct radeon_fence *fence);

static struct radeon_asic r300_asic = {
        .init = &r300_init,
        .errata = &r300_errata,
        .vram_info = &r300_vram_info,
        .gpu_reset = &r300_gpu_reset,
        .mc_init = &r300_mc_init,
        .mc_fini = &r300_mc_fini,
        .wb_init = &r100_wb_init,
        .wb_fini = &r100_wb_fini,
        .gart_init = &r100_pci_gart_init,
        .gart_fini = &r100_pci_gart_fini,
        .gart_enable = &r100_pci_gart_enable,
        .gart_disable = &r100_pci_gart_disable,
        .gart_tlb_flush = &r100_pci_gart_tlb_flush,
        .gart_set_page = &r100_pci_gart_set_page,
        .cp_init = &r100_cp_init,
        .cp_fini = &r100_cp_fini,
        .cp_disable = &r100_cp_disable,
        .cp_commit = &r100_cp_commit,
        .ring_start = &r300_ring_start,
        .ring_test = &r100_ring_test,
        .ring_ib_execute = &r100_ring_ib_execute,
        .ib_test = &r100_ib_test,
        .irq_set = &r100_irq_set,
        .irq_process = &r100_irq_process,
        .get_vblank_counter = &r100_get_vblank_counter,
        .fence_ring_emit = &r300_fence_ring_emit,
        .cs_parse = &r300_cs_parse,
        .copy_blit = &r100_copy_blit,
        .copy_dma = &r300_copy_dma,
        .copy = &r100_copy_blit,
        .set_engine_clock = &radeon_legacy_set_engine_clock,
        .set_memory_clock = NULL,
        .set_pcie_lanes = &rv370_set_pcie_lanes,
        .set_clock_gating = &radeon_legacy_set_clock_gating,
        .set_surface_reg = r100_set_surface_reg,
        .clear_surface_reg = r100_clear_surface_reg,
        .bandwidth_update = &r100_bandwidth_update,
};

/*
 * r420,r423,rv410
 */
extern int r420_init(struct radeon_device *rdev);
extern void r420_fini(struct radeon_device *rdev);
extern int r420_suspend(struct radeon_device *rdev);
extern int r420_resume(struct radeon_device *rdev);
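/*
 * r420 has been moved over to the consolidated init path: most of the
 * per-stage hooks below (errata, vram_info, mc_*, wb_*, cp_*) are NULL,
 * the assumption being that this work is handled inside r420_init(),
 * r420_resume() and friends rather than through the asic table.
 */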
static struct radeon_asic r420_asic = {
        .init = &r420_init,
        .fini = &r420_fini,
        .suspend = &r420_suspend,
        .resume = &r420_resume,
        .errata = NULL,
        .vram_info = NULL,
        .gpu_reset = &r300_gpu_reset,
        .mc_init = NULL,
        .mc_fini = NULL,
        .wb_init = NULL,
        .wb_fini = NULL,
        .gart_enable = NULL,
        .gart_disable = NULL,
        .gart_tlb_flush = &rv370_pcie_gart_tlb_flush,
        .gart_set_page = &rv370_pcie_gart_set_page,
        .cp_init = NULL,
        .cp_fini = NULL,
        .cp_disable = NULL,
        .cp_commit = &r100_cp_commit,
        .ring_start = &r300_ring_start,
        .ring_test = &r100_ring_test,
        .ring_ib_execute = &r100_ring_ib_execute,
        .ib_test = NULL,
        .irq_set = &r100_irq_set,
        .irq_process = &r100_irq_process,
        .get_vblank_counter = &r100_get_vblank_counter,
        .fence_ring_emit = &r300_fence_ring_emit,
        .cs_parse = &r300_cs_parse,
        .copy_blit = &r100_copy_blit,
        .copy_dma = &r300_copy_dma,
        .copy = &r100_copy_blit,
        .set_engine_clock = &radeon_atom_set_engine_clock,
        .set_memory_clock = &radeon_atom_set_memory_clock,
        .set_pcie_lanes = &rv370_set_pcie_lanes,
        .set_clock_gating = &radeon_atom_set_clock_gating,
        .set_surface_reg = r100_set_surface_reg,
        .clear_surface_reg = r100_clear_surface_reg,
        .bandwidth_update = &r100_bandwidth_update,
};


/*
 * rs400,rs480
 */
extern int rs400_init(struct radeon_device *rdev);
extern void rs400_fini(struct radeon_device *rdev);
extern int rs400_suspend(struct radeon_device *rdev);
extern int rs400_resume(struct radeon_device *rdev);
/* rs400 GART hooks, also reused by the rs690 table below */
int rs400_gart_init(struct radeon_device *rdev);
void rs400_gart_fini(struct radeon_device *rdev);
int rs400_gart_enable(struct radeon_device *rdev);
void rs400_gart_disable(struct radeon_device *rdev);
void rs400_gart_tlb_flush(struct radeon_device *rdev);
int rs400_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr);
uint32_t rs400_mc_rreg(struct radeon_device *rdev, uint32_t reg);
void rs400_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
static struct radeon_asic rs400_asic = {
        .init = &rs400_init,
        .fini = &rs400_fini,
        .suspend = &rs400_suspend,
        .resume = &rs400_resume,
        .errata = NULL,
        .vram_info = NULL,
        .gpu_reset = &r300_gpu_reset,
        .mc_init = NULL,
        .mc_fini = NULL,
        .wb_init = NULL,
        .wb_fini = NULL,
        .gart_init = NULL,
        .gart_fini = NULL,
        .gart_enable = NULL,
        .gart_disable = NULL,
        .gart_tlb_flush = &rs400_gart_tlb_flush,
        .gart_set_page = &rs400_gart_set_page,
        .cp_init = NULL,
        .cp_fini = NULL,
        .cp_disable = NULL,
        .cp_commit = &r100_cp_commit,
        .ring_start = &r300_ring_start,
        .ring_test = &r100_ring_test,
        .ring_ib_execute = &r100_ring_ib_execute,
        .ib_test = NULL,
        .irq_set = &r100_irq_set,
        .irq_process = &r100_irq_process,
        .get_vblank_counter = &r100_get_vblank_counter,
        .fence_ring_emit = &r300_fence_ring_emit,
        .cs_parse = &r300_cs_parse,
        .copy_blit = &r100_copy_blit,
        .copy_dma = &r300_copy_dma,
        .copy = &r100_copy_blit,
        .set_engine_clock = &radeon_legacy_set_engine_clock,
        .set_memory_clock = NULL,
        .set_pcie_lanes = NULL,
        .set_clock_gating = &radeon_legacy_set_clock_gating,
        .set_surface_reg = r100_set_surface_reg,
        .clear_surface_reg = r100_clear_surface_reg,
        .bandwidth_update = &r100_bandwidth_update,
};


/*
 * rs600.
 */
int rs600_init(struct radeon_device *rdev);
void rs600_errata(struct radeon_device *rdev);
void rs600_vram_info(struct radeon_device *rdev);
int rs600_mc_init(struct radeon_device *rdev);
void rs600_mc_fini(struct radeon_device *rdev);
int rs600_irq_set(struct radeon_device *rdev);
int rs600_irq_process(struct radeon_device *rdev);
u32 rs600_get_vblank_counter(struct radeon_device *rdev, int crtc);
int rs600_gart_init(struct radeon_device *rdev);
void rs600_gart_fini(struct radeon_device *rdev);
int rs600_gart_enable(struct radeon_device *rdev);
void rs600_gart_disable(struct radeon_device *rdev);
void rs600_gart_tlb_flush(struct radeon_device *rdev);
int rs600_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr);
uint32_t rs600_mc_rreg(struct radeon_device *rdev, uint32_t reg);
void rs600_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
void rs600_bandwidth_update(struct radeon_device *rdev);
static struct radeon_asic rs600_asic = {
        .init = &rs600_init,
        .errata = &rs600_errata,
        .vram_info = &rs600_vram_info,
        .gpu_reset = &r300_gpu_reset,
        .mc_init = &rs600_mc_init,
        .mc_fini = &rs600_mc_fini,
        .wb_init = &r100_wb_init,
        .wb_fini = &r100_wb_fini,
        .gart_init = &rs600_gart_init,
        .gart_fini = &rs600_gart_fini,
        .gart_enable = &rs600_gart_enable,
        .gart_disable = &rs600_gart_disable,
        .gart_tlb_flush = &rs600_gart_tlb_flush,
        .gart_set_page = &rs600_gart_set_page,
        .cp_init = &r100_cp_init,
        .cp_fini = &r100_cp_fini,
        .cp_disable = &r100_cp_disable,
        .cp_commit = &r100_cp_commit,
        .ring_start = &r300_ring_start,
        .ring_test = &r100_ring_test,
        .ring_ib_execute = &r100_ring_ib_execute,
        .ib_test = &r100_ib_test,
        .irq_set = &rs600_irq_set,
        .irq_process = &rs600_irq_process,
        .get_vblank_counter = &rs600_get_vblank_counter,
        .fence_ring_emit = &r300_fence_ring_emit,
        .cs_parse = &r300_cs_parse,
        .copy_blit = &r100_copy_blit,
        .copy_dma = &r300_copy_dma,
        .copy = &r100_copy_blit,
        .set_engine_clock = &radeon_atom_set_engine_clock,
        .set_memory_clock = &radeon_atom_set_memory_clock,
        .set_pcie_lanes = NULL,
        .set_clock_gating = &radeon_atom_set_clock_gating,
        .bandwidth_update = &rs600_bandwidth_update,
};


/*
 * rs690,rs740
 */
void rs690_errata(struct radeon_device *rdev);
void rs690_vram_info(struct radeon_device *rdev);
int rs690_mc_init(struct radeon_device *rdev);
void rs690_mc_fini(struct radeon_device *rdev);
uint32_t rs690_mc_rreg(struct radeon_device *rdev, uint32_t reg);
void rs690_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
void rs690_bandwidth_update(struct radeon_device *rdev);
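/*
 * rs690/rs740 are IGPs that mix blocks from neighbouring families: the
 * table below pairs rs600-style display/IRQ handling with the rs400/rs480
 * IGP GART hooks, and reuses rs600_init() for basic setup.
 */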
static struct radeon_asic rs690_asic = {
        .init = &rs600_init,
        .errata = &rs690_errata,
        .vram_info = &rs690_vram_info,
        .gpu_reset = &r300_gpu_reset,
        .mc_init = &rs690_mc_init,
        .mc_fini = &rs690_mc_fini,
        .wb_init = &r100_wb_init,
        .wb_fini = &r100_wb_fini,
        .gart_init = &rs400_gart_init,
        .gart_fini = &rs400_gart_fini,
        .gart_enable = &rs400_gart_enable,
        .gart_disable = &rs400_gart_disable,
        .gart_tlb_flush = &rs400_gart_tlb_flush,
        .gart_set_page = &rs400_gart_set_page,
        .cp_init = &r100_cp_init,
        .cp_fini = &r100_cp_fini,
        .cp_disable = &r100_cp_disable,
        .cp_commit = &r100_cp_commit,
        .ring_start = &r300_ring_start,
        .ring_test = &r100_ring_test,
        .ring_ib_execute = &r100_ring_ib_execute,
        .ib_test = &r100_ib_test,
        .irq_set = &rs600_irq_set,
        .irq_process = &rs600_irq_process,
        .get_vblank_counter = &rs600_get_vblank_counter,
        .fence_ring_emit = &r300_fence_ring_emit,
        .cs_parse = &r300_cs_parse,
        .copy_blit = &r100_copy_blit,
        .copy_dma = &r300_copy_dma,
        .copy = &r300_copy_dma,
        .set_engine_clock = &radeon_atom_set_engine_clock,
        .set_memory_clock = &radeon_atom_set_memory_clock,
        .set_pcie_lanes = NULL,
        .set_clock_gating = &radeon_atom_set_clock_gating,
        .set_surface_reg = r100_set_surface_reg,
        .clear_surface_reg = r100_clear_surface_reg,
        .bandwidth_update = &rs690_bandwidth_update,
};


/*
 * rv515
 */
int rv515_init(struct radeon_device *rdev);
void rv515_fini(struct radeon_device *rdev);
int rv515_gpu_reset(struct radeon_device *rdev);
uint32_t rv515_mc_rreg(struct radeon_device *rdev, uint32_t reg);
void rv515_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
void rv515_ring_start(struct radeon_device *rdev);
uint32_t rv515_pcie_rreg(struct radeon_device *rdev, uint32_t reg);
void rv515_pcie_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
void rv515_bandwidth_update(struct radeon_device *rdev);
int rv515_resume(struct radeon_device *rdev);
int rv515_suspend(struct radeon_device *rdev);
static struct radeon_asic rv515_asic = {
        .init = &rv515_init,
        .fini = &rv515_fini,
        .suspend = &rv515_suspend,
        .resume = &rv515_resume,
        .errata = NULL,
        .vram_info = NULL,
        .gpu_reset = &rv515_gpu_reset,
        .mc_init = NULL,
        .mc_fini = NULL,
        .wb_init = NULL,
        .wb_fini = NULL,
        .gart_init = &rv370_pcie_gart_init,
        .gart_fini = &rv370_pcie_gart_fini,
        .gart_enable = NULL,
        .gart_disable = NULL,
        .gart_tlb_flush = &rv370_pcie_gart_tlb_flush,
        .gart_set_page = &rv370_pcie_gart_set_page,
        .cp_init = NULL,
        .cp_fini = NULL,
        .cp_disable = NULL,
        .cp_commit = &r100_cp_commit,
        .ring_start = &rv515_ring_start,
        .ring_test = &r100_ring_test,
        .ring_ib_execute = &r100_ring_ib_execute,
        .ib_test = NULL,
        .irq_set = &rs600_irq_set,
        .irq_process = &rs600_irq_process,
        .get_vblank_counter = &rs600_get_vblank_counter,
        .fence_ring_emit = &r300_fence_ring_emit,
        .cs_parse = &r300_cs_parse,
        .copy_blit = &r100_copy_blit,
        .copy_dma = &r300_copy_dma,
        .copy = &r100_copy_blit,
        .set_engine_clock = &radeon_atom_set_engine_clock,
        .set_memory_clock = &radeon_atom_set_memory_clock,
        .set_pcie_lanes = &rv370_set_pcie_lanes,
        .set_clock_gating = &radeon_atom_set_clock_gating,
        .set_surface_reg = r100_set_surface_reg,
        .clear_surface_reg = r100_clear_surface_reg,
        .bandwidth_update = &rv515_bandwidth_update,
};


/*
 * r520,rv530,rv560,rv570,r580
 */
int r520_init(struct radeon_device *rdev);
int r520_resume(struct radeon_device *rdev);
static struct radeon_asic r520_asic = {
        .init = &r520_init,
        .fini = &rv515_fini,
        .suspend = &rv515_suspend,
        .resume = &r520_resume,
        .errata = NULL,
        .vram_info = NULL,
        .gpu_reset = &rv515_gpu_reset,
        .mc_init = NULL,
        .mc_fini = NULL,
        .wb_init = NULL,
        .wb_fini = NULL,
        .gart_init = NULL,
        .gart_fini = NULL,
        .gart_enable = NULL,
        .gart_disable = NULL,
        .gart_tlb_flush = &rv370_pcie_gart_tlb_flush,
        .gart_set_page = &rv370_pcie_gart_set_page,
        .cp_init = NULL,
        .cp_fini = NULL,
        .cp_disable = NULL,
        .cp_commit = &r100_cp_commit,
        .ring_start = &rv515_ring_start,
        .ring_test = &r100_ring_test,
        .ring_ib_execute = &r100_ring_ib_execute,
        .ib_test = NULL,
        .irq_set = &rs600_irq_set,
        .irq_process = &rs600_irq_process,
        .get_vblank_counter = &rs600_get_vblank_counter,
        .fence_ring_emit = &r300_fence_ring_emit,
        .cs_parse = &r300_cs_parse,
        .copy_blit = &r100_copy_blit,
        .copy_dma = &r300_copy_dma,
        .copy = &r100_copy_blit,
        .set_engine_clock = &radeon_atom_set_engine_clock,
        .set_memory_clock = &radeon_atom_set_memory_clock,
        .set_pcie_lanes = &rv370_set_pcie_lanes,
        .set_clock_gating = &radeon_atom_set_clock_gating,
        .set_surface_reg = r100_set_surface_reg,
        .clear_surface_reg = r100_clear_surface_reg,
        .bandwidth_update = &rv515_bandwidth_update,
};

/*
 * r600,rv610,rv630,rv620,rv635,rv670,rs780,rs880
 */
int r600_init(struct radeon_device *rdev);
void r600_fini(struct radeon_device *rdev);
int r600_suspend(struct radeon_device *rdev);
int r600_resume(struct radeon_device *rdev);
int r600_wb_init(struct radeon_device *rdev);
void r600_wb_fini(struct radeon_device *rdev);
void r600_cp_commit(struct radeon_device *rdev);
void r600_pcie_gart_tlb_flush(struct radeon_device *rdev);
uint32_t r600_pciep_rreg(struct radeon_device *rdev, uint32_t reg);
void r600_pciep_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
int r600_cs_parse(struct radeon_cs_parser *p);
void r600_fence_ring_emit(struct radeon_device *rdev,
                          struct radeon_fence *fence);
int r600_copy_dma(struct radeon_device *rdev,
                  uint64_t src_offset,
                  uint64_t dst_offset,
                  unsigned num_pages,
                  struct radeon_fence *fence);
int r600_irq_process(struct radeon_device *rdev);
int r600_irq_set(struct radeon_device *rdev);
int r600_gpu_reset(struct radeon_device *rdev);
int r600_set_surface_reg(struct radeon_device *rdev, int reg,
                         uint32_t tiling_flags, uint32_t pitch,
                         uint32_t offset, uint32_t obj_size);
int r600_clear_surface_reg(struct radeon_device *rdev, int reg);
void r600_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib);
int r600_ib_test(struct radeon_device *rdev);
int r600_ring_test(struct radeon_device *rdev);
int r600_copy_blit(struct radeon_device *rdev,
                   uint64_t src_offset, uint64_t dst_offset,
                   unsigned num_pages, struct radeon_fence *fence);

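/*
 * r600-class parts likewise use the consolidated init/fini/suspend/resume
 * entry points together with the r600 write-back and IRQ paths.  Note
 * that both .copy_dma and .copy point at r600_copy_blit in the tables
 * below, so all generic copies go through the blit path even though a
 * DMA copy prototype is declared above.
 */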
static struct radeon_asic r600_asic = {
        .errata = NULL,
        .init = &r600_init,
        .fini = &r600_fini,
        .suspend = &r600_suspend,
        .resume = &r600_resume,
        .cp_commit = &r600_cp_commit,
        .vram_info = NULL,
        .gpu_reset = &r600_gpu_reset,
        .mc_init = NULL,
        .mc_fini = NULL,
        .wb_init = &r600_wb_init,
        .wb_fini = &r600_wb_fini,
        .gart_enable = NULL,
        .gart_disable = NULL,
        .gart_tlb_flush = &r600_pcie_gart_tlb_flush,
        .gart_set_page = &rs600_gart_set_page,
        .cp_init = NULL,
        .cp_fini = NULL,
        .cp_disable = NULL,
        .ring_start = NULL,
        .ring_test = &r600_ring_test,
        .ring_ib_execute = &r600_ring_ib_execute,
        .ib_test = &r600_ib_test,
        .irq_set = &r600_irq_set,
        .irq_process = &r600_irq_process,
        .fence_ring_emit = &r600_fence_ring_emit,
        .cs_parse = &r600_cs_parse,
        .copy_blit = &r600_copy_blit,
        .copy_dma = &r600_copy_blit,
        .copy = &r600_copy_blit,
        .set_engine_clock = &radeon_atom_set_engine_clock,
        .set_memory_clock = &radeon_atom_set_memory_clock,
        .set_pcie_lanes = NULL,
        .set_clock_gating = &radeon_atom_set_clock_gating,
        .set_surface_reg = r600_set_surface_reg,
        .clear_surface_reg = r600_clear_surface_reg,
        .bandwidth_update = &rv515_bandwidth_update,
};

/*
 * rv770,rv730,rv710,rv740
 */
int rv770_init(struct radeon_device *rdev);
void rv770_fini(struct radeon_device *rdev);
int rv770_suspend(struct radeon_device *rdev);
int rv770_resume(struct radeon_device *rdev);
int rv770_gpu_reset(struct radeon_device *rdev);

static struct radeon_asic rv770_asic = {
        .errata = NULL,
        .init = &rv770_init,
        .fini = &rv770_fini,
        .suspend = &rv770_suspend,
        .resume = &rv770_resume,
        .cp_commit = &r600_cp_commit,
        .vram_info = NULL,
        .gpu_reset = &rv770_gpu_reset,
        .mc_init = NULL,
        .mc_fini = NULL,
        .wb_init = &r600_wb_init,
        .wb_fini = &r600_wb_fini,
        .gart_enable = NULL,
        .gart_disable = NULL,
        .gart_tlb_flush = &r600_pcie_gart_tlb_flush,
        .gart_set_page = &rs600_gart_set_page,
        .cp_init = NULL,
        .cp_fini = NULL,
        .cp_disable = NULL,
        .ring_start = NULL,
        .ring_test = &r600_ring_test,
        .ring_ib_execute = &r600_ring_ib_execute,
        .ib_test = &r600_ib_test,
        .irq_set = &r600_irq_set,
        .irq_process = &r600_irq_process,
        .fence_ring_emit = &r600_fence_ring_emit,
        .cs_parse = &r600_cs_parse,
        .copy_blit = &r600_copy_blit,
        .copy_dma = &r600_copy_blit,
        .copy = &r600_copy_blit,
        .set_engine_clock = &radeon_atom_set_engine_clock,
        .set_memory_clock = &radeon_atom_set_memory_clock,
        .set_pcie_lanes = NULL,
        .set_clock_gating = &radeon_atom_set_clock_gating,
        .set_surface_reg = r600_set_surface_reg,
        .clear_surface_reg = r600_clear_surface_reg,
        .bandwidth_update = &rv515_bandwidth_update,
};

#endif