/*
 * Copyright 2008 Advanced Micro Devices, Inc.
 * Copyright 2008 Red Hat Inc.
 * Copyright 2009 Jerome Glisse.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors: Dave Airlie
 *          Alex Deucher
 *          Jerome Glisse
 */
#ifndef __RADEON_ASIC_H__
#define __RADEON_ASIC_H__

/*
 * common functions
 */
void radeon_legacy_set_engine_clock(struct radeon_device *rdev, uint32_t eng_clock);
void radeon_legacy_set_clock_gating(struct radeon_device *rdev, int enable);

void radeon_atom_set_engine_clock(struct radeon_device *rdev, uint32_t eng_clock);
void radeon_atom_set_memory_clock(struct radeon_device *rdev, uint32_t mem_clock);
void radeon_atom_set_clock_gating(struct radeon_device *rdev, int enable);

/*
 * r100,rv100,rs100,rv200,rs200,r200,rv250,rs300,rv280
 */
int r100_init(struct radeon_device *rdev);
int r200_init(struct radeon_device *rdev);
uint32_t r100_mm_rreg(struct radeon_device *rdev, uint32_t reg);
void r100_mm_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
void r100_errata(struct radeon_device *rdev);
void r100_vram_info(struct radeon_device *rdev);
int r100_gpu_reset(struct radeon_device *rdev);
int r100_mc_init(struct radeon_device *rdev);
void r100_mc_fini(struct radeon_device *rdev);
u32 r100_get_vblank_counter(struct radeon_device *rdev, int crtc);
int r100_wb_init(struct radeon_device *rdev);
void r100_wb_fini(struct radeon_device *rdev);
int r100_pci_gart_init(struct radeon_device *rdev);
void r100_pci_gart_fini(struct radeon_device *rdev);
int r100_pci_gart_enable(struct radeon_device *rdev);
void r100_pci_gart_disable(struct radeon_device *rdev);
void r100_pci_gart_tlb_flush(struct radeon_device *rdev);
int r100_pci_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr);
int r100_cp_init(struct radeon_device *rdev, unsigned ring_size);
void r100_cp_fini(struct radeon_device *rdev);
void r100_cp_disable(struct radeon_device *rdev);
void r100_cp_commit(struct radeon_device *rdev);
void r100_ring_start(struct radeon_device *rdev);
int r100_irq_set(struct radeon_device *rdev);
int r100_irq_process(struct radeon_device *rdev);
void r100_fence_ring_emit(struct radeon_device *rdev,
                          struct radeon_fence *fence);
int r100_cs_parse(struct radeon_cs_parser *p);
void r100_pll_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
uint32_t r100_pll_rreg(struct radeon_device *rdev, uint32_t reg);
int r100_copy_blit(struct radeon_device *rdev,
                   uint64_t src_offset,
                   uint64_t dst_offset,
                   unsigned num_pages,
                   struct radeon_fence *fence);
int r100_set_surface_reg(struct radeon_device *rdev, int reg,
                         uint32_t tiling_flags, uint32_t pitch,
                         uint32_t offset, uint32_t obj_size);
int r100_clear_surface_reg(struct radeon_device *rdev, int reg);
void r100_bandwidth_update(struct radeon_device *rdev);
void r100_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib);
int r100_ib_test(struct radeon_device *rdev);
int r100_ring_test(struct radeon_device *rdev);

static struct radeon_asic r100_asic = {
        .init = &r100_init,
        .errata = &r100_errata,
        .vram_info = &r100_vram_info,
        .gpu_reset = &r100_gpu_reset,
        .mc_init = &r100_mc_init,
        .mc_fini = &r100_mc_fini,
        .wb_init = &r100_wb_init,
        .wb_fini = &r100_wb_fini,
        .gart_init = &r100_pci_gart_init,
        .gart_fini = &r100_pci_gart_fini,
        .gart_enable = &r100_pci_gart_enable,
        .gart_disable = &r100_pci_gart_disable,
        .gart_tlb_flush = &r100_pci_gart_tlb_flush,
        .gart_set_page = &r100_pci_gart_set_page,
        .cp_init = &r100_cp_init,
        .cp_fini = &r100_cp_fini,
        .cp_disable = &r100_cp_disable,
        .cp_commit = &r100_cp_commit,
        .ring_start = &r100_ring_start,
        .ring_test = &r100_ring_test,
        .ring_ib_execute = &r100_ring_ib_execute,
        .ib_test = &r100_ib_test,
        .irq_set = &r100_irq_set,
        .irq_process = &r100_irq_process,
        .get_vblank_counter = &r100_get_vblank_counter,
        .fence_ring_emit = &r100_fence_ring_emit,
        .cs_parse = &r100_cs_parse,
        .copy_blit = &r100_copy_blit,
        .copy_dma = NULL,
        .copy = &r100_copy_blit,
        .set_engine_clock = &radeon_legacy_set_engine_clock,
        .set_memory_clock = NULL,
        .set_pcie_lanes = NULL,
        .set_clock_gating = &radeon_legacy_set_clock_gating,
        .set_surface_reg = r100_set_surface_reg,
        .clear_surface_reg = r100_clear_surface_reg,
        .bandwidth_update = &r100_bandwidth_update,
};
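
/*
 * The per-family tables in this header fill in the struct radeon_asic
 * callbacks; the driver core dispatches through those pointers rather
 * than calling family code directly, typically via thin wrappers in
 * radeon.h.  A minimal sketch of that wrapper pattern (exact macro
 * names may differ from what is shown here):
 *
 *   #define radeon_gpu_reset(rdev) (rdev)->asic->gpu_reset((rdev))
 *   #define radeon_ring_start(rdev) (rdev)->asic->ring_start((rdev))
 *
 * A NULL entry means the callback is not used for that family, either
 * because the operation is unsupported or because it is handled inside
 * that family's own init/suspend/resume path.
 */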


/*
 * r300,r350,rv350,rv380
 */
int r300_init(struct radeon_device *rdev);
void r300_errata(struct radeon_device *rdev);
void r300_vram_info(struct radeon_device *rdev);
int r300_gpu_reset(struct radeon_device *rdev);
int r300_mc_init(struct radeon_device *rdev);
void r300_mc_fini(struct radeon_device *rdev);
void r300_ring_start(struct radeon_device *rdev);
void r300_fence_ring_emit(struct radeon_device *rdev,
                          struct radeon_fence *fence);
int r300_cs_parse(struct radeon_cs_parser *p);
int rv370_pcie_gart_init(struct radeon_device *rdev);
void rv370_pcie_gart_fini(struct radeon_device *rdev);
int rv370_pcie_gart_enable(struct radeon_device *rdev);
void rv370_pcie_gart_disable(struct radeon_device *rdev);
void rv370_pcie_gart_tlb_flush(struct radeon_device *rdev);
int rv370_pcie_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr);
uint32_t rv370_pcie_rreg(struct radeon_device *rdev, uint32_t reg);
void rv370_pcie_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
void rv370_set_pcie_lanes(struct radeon_device *rdev, int lanes);
int r300_copy_dma(struct radeon_device *rdev,
                  uint64_t src_offset,
                  uint64_t dst_offset,
                  unsigned num_pages,
                  struct radeon_fence *fence);

static struct radeon_asic r300_asic = {
        .init = &r300_init,
        .errata = &r300_errata,
        .vram_info = &r300_vram_info,
        .gpu_reset = &r300_gpu_reset,
        .mc_init = &r300_mc_init,
        .mc_fini = &r300_mc_fini,
        .wb_init = &r100_wb_init,
        .wb_fini = &r100_wb_fini,
        .gart_init = &r100_pci_gart_init,
        .gart_fini = &r100_pci_gart_fini,
        .gart_enable = &r100_pci_gart_enable,
        .gart_disable = &r100_pci_gart_disable,
        .gart_tlb_flush = &r100_pci_gart_tlb_flush,
        .gart_set_page = &r100_pci_gart_set_page,
        .cp_init = &r100_cp_init,
        .cp_fini = &r100_cp_fini,
        .cp_disable = &r100_cp_disable,
        .cp_commit = &r100_cp_commit,
        .ring_start = &r300_ring_start,
        .ring_test = &r100_ring_test,
        .ring_ib_execute = &r100_ring_ib_execute,
        .ib_test = &r100_ib_test,
        .irq_set = &r100_irq_set,
        .irq_process = &r100_irq_process,
        .get_vblank_counter = &r100_get_vblank_counter,
        .fence_ring_emit = &r300_fence_ring_emit,
        .cs_parse = &r300_cs_parse,
        .copy_blit = &r100_copy_blit,
        .copy_dma = &r300_copy_dma,
        .copy = &r100_copy_blit,
        .set_engine_clock = &radeon_legacy_set_engine_clock,
        .set_memory_clock = NULL,
        .set_pcie_lanes = &rv370_set_pcie_lanes,
        .set_clock_gating = &radeon_legacy_set_clock_gating,
        .set_surface_reg = r100_set_surface_reg,
        .clear_surface_reg = r100_clear_surface_reg,
        .bandwidth_update = &r100_bandwidth_update,
};

/*
 * r420,r423,rv410
 */
extern int r420_init(struct radeon_device *rdev);
extern void r420_fini(struct radeon_device *rdev);
extern int r420_suspend(struct radeon_device *rdev);
extern int r420_resume(struct radeon_device *rdev);
static struct radeon_asic r420_asic = {
        .init = &r420_init,
        .fini = &r420_fini,
        .suspend = &r420_suspend,
        .resume = &r420_resume,
        .errata = NULL,
        .vram_info = NULL,
        .gpu_reset = &r300_gpu_reset,
        .mc_init = NULL,
        .mc_fini = NULL,
        .wb_init = NULL,
        .wb_fini = NULL,
        .gart_enable = NULL,
        .gart_disable = NULL,
        .gart_tlb_flush = &rv370_pcie_gart_tlb_flush,
        .gart_set_page = &rv370_pcie_gart_set_page,
        .cp_init = NULL,
        .cp_fini = NULL,
        .cp_disable = NULL,
        .cp_commit = &r100_cp_commit,
        .ring_start = &r300_ring_start,
        .ring_test = &r100_ring_test,
        .ring_ib_execute = &r100_ring_ib_execute,
        .ib_test = NULL,
        .irq_set = &r100_irq_set,
        .irq_process = &r100_irq_process,
        .get_vblank_counter = &r100_get_vblank_counter,
        .fence_ring_emit = &r300_fence_ring_emit,
        .cs_parse = &r300_cs_parse,
        .copy_blit = &r100_copy_blit,
        .copy_dma = &r300_copy_dma,
        .copy = &r100_copy_blit,
        .set_engine_clock = &radeon_atom_set_engine_clock,
        .set_memory_clock = &radeon_atom_set_memory_clock,
        .set_pcie_lanes = &rv370_set_pcie_lanes,
        .set_clock_gating = &radeon_atom_set_clock_gating,
        .set_surface_reg = r100_set_surface_reg,
        .clear_surface_reg = r100_clear_surface_reg,
        .bandwidth_update = &r100_bandwidth_update,
};


/*
 * rs400,rs480
 */
void rs400_errata(struct radeon_device *rdev);
void rs400_vram_info(struct radeon_device *rdev);
int rs400_mc_init(struct radeon_device *rdev);
void rs400_mc_fini(struct radeon_device *rdev);
int rs400_gart_init(struct radeon_device *rdev);
void rs400_gart_fini(struct radeon_device *rdev);
int rs400_gart_enable(struct radeon_device *rdev);
void rs400_gart_disable(struct radeon_device *rdev);
void rs400_gart_tlb_flush(struct radeon_device *rdev);
int rs400_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr);
uint32_t rs400_mc_rreg(struct radeon_device *rdev, uint32_t reg);
void rs400_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
static struct radeon_asic rs400_asic = {
        .init = &r300_init,
        .errata = &rs400_errata,
        .vram_info = &rs400_vram_info,
        .gpu_reset = &r300_gpu_reset,
        .mc_init = &rs400_mc_init,
        .mc_fini = &rs400_mc_fini,
        .wb_init = &r100_wb_init,
        .wb_fini = &r100_wb_fini,
        .gart_init = &rs400_gart_init,
        .gart_fini = &rs400_gart_fini,
        .gart_enable = &rs400_gart_enable,
        .gart_disable = &rs400_gart_disable,
        .gart_tlb_flush = &rs400_gart_tlb_flush,
        .gart_set_page = &rs400_gart_set_page,
        .cp_init = &r100_cp_init,
        .cp_fini = &r100_cp_fini,
        .cp_disable = &r100_cp_disable,
        .cp_commit = &r100_cp_commit,
        .ring_start = &r300_ring_start,
        .ring_test = &r100_ring_test,
        .ring_ib_execute = &r100_ring_ib_execute,
        .ib_test = &r100_ib_test,
        .irq_set = &r100_irq_set,
        .irq_process = &r100_irq_process,
        .get_vblank_counter = &r100_get_vblank_counter,
        .fence_ring_emit = &r300_fence_ring_emit,
        .cs_parse = &r300_cs_parse,
        .copy_blit = &r100_copy_blit,
        .copy_dma = &r300_copy_dma,
        .copy = &r100_copy_blit,
        .set_engine_clock = &radeon_legacy_set_engine_clock,
        .set_memory_clock = NULL,
        .set_pcie_lanes = NULL,
        .set_clock_gating = &radeon_legacy_set_clock_gating,
        .set_surface_reg = r100_set_surface_reg,
        .clear_surface_reg = r100_clear_surface_reg,
        .bandwidth_update = &r100_bandwidth_update,
};


/*
 * rs600.
 */
int rs600_init(struct radeon_device *rdev);
void rs600_errata(struct radeon_device *rdev);
void rs600_vram_info(struct radeon_device *rdev);
int rs600_mc_init(struct radeon_device *rdev);
void rs600_mc_fini(struct radeon_device *rdev);
int rs600_irq_set(struct radeon_device *rdev);
int rs600_irq_process(struct radeon_device *rdev);
u32 rs600_get_vblank_counter(struct radeon_device *rdev, int crtc);
int rs600_gart_init(struct radeon_device *rdev);
void rs600_gart_fini(struct radeon_device *rdev);
int rs600_gart_enable(struct radeon_device *rdev);
void rs600_gart_disable(struct radeon_device *rdev);
void rs600_gart_tlb_flush(struct radeon_device *rdev);
int rs600_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr);
uint32_t rs600_mc_rreg(struct radeon_device *rdev, uint32_t reg);
void rs600_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
void rs600_bandwidth_update(struct radeon_device *rdev);
static struct radeon_asic rs600_asic = {
        .init = &rs600_init,
        .errata = &rs600_errata,
        .vram_info = &rs600_vram_info,
        .gpu_reset = &r300_gpu_reset,
        .mc_init = &rs600_mc_init,
        .mc_fini = &rs600_mc_fini,
        .wb_init = &r100_wb_init,
        .wb_fini = &r100_wb_fini,
        .gart_init = &rs600_gart_init,
        .gart_fini = &rs600_gart_fini,
        .gart_enable = &rs600_gart_enable,
        .gart_disable = &rs600_gart_disable,
        .gart_tlb_flush = &rs600_gart_tlb_flush,
        .gart_set_page = &rs600_gart_set_page,
        .cp_init = &r100_cp_init,
        .cp_fini = &r100_cp_fini,
        .cp_disable = &r100_cp_disable,
        .cp_commit = &r100_cp_commit,
        .ring_start = &r300_ring_start,
        .ring_test = &r100_ring_test,
        .ring_ib_execute = &r100_ring_ib_execute,
        .ib_test = &r100_ib_test,
        .irq_set = &rs600_irq_set,
        .irq_process = &rs600_irq_process,
        .get_vblank_counter = &rs600_get_vblank_counter,
        .fence_ring_emit = &r300_fence_ring_emit,
        .cs_parse = &r300_cs_parse,
        .copy_blit = &r100_copy_blit,
        .copy_dma = &r300_copy_dma,
        .copy = &r100_copy_blit,
        .set_engine_clock = &radeon_atom_set_engine_clock,
        .set_memory_clock = &radeon_atom_set_memory_clock,
        .set_pcie_lanes = NULL,
        .set_clock_gating = &radeon_atom_set_clock_gating,
        .bandwidth_update = &rs600_bandwidth_update,
};


/*
 * rs690,rs740
 */
void rs690_errata(struct radeon_device *rdev);
void rs690_vram_info(struct radeon_device *rdev);
int rs690_mc_init(struct radeon_device *rdev);
void rs690_mc_fini(struct radeon_device *rdev);
uint32_t rs690_mc_rreg(struct radeon_device *rdev, uint32_t reg);
void rs690_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
void rs690_bandwidth_update(struct radeon_device *rdev);
static struct radeon_asic rs690_asic = {
        .init = &rs600_init,
        .errata = &rs690_errata,
        .vram_info = &rs690_vram_info,
        .gpu_reset = &r300_gpu_reset,
        .mc_init = &rs690_mc_init,
        .mc_fini = &rs690_mc_fini,
        .wb_init = &r100_wb_init,
        .wb_fini = &r100_wb_fini,
        .gart_init = &rs400_gart_init,
        .gart_fini = &rs400_gart_fini,
        .gart_enable = &rs400_gart_enable,
        .gart_disable = &rs400_gart_disable,
        .gart_tlb_flush = &rs400_gart_tlb_flush,
        .gart_set_page = &rs400_gart_set_page,
        .cp_init = &r100_cp_init,
        .cp_fini = &r100_cp_fini,
        .cp_disable = &r100_cp_disable,
        .cp_commit = &r100_cp_commit,
        .ring_start = &r300_ring_start,
        .ring_test = &r100_ring_test,
        .ring_ib_execute = &r100_ring_ib_execute,
        .ib_test = &r100_ib_test,
        .irq_set = &rs600_irq_set,
        .irq_process = &rs600_irq_process,
        .get_vblank_counter = &rs600_get_vblank_counter,
        .fence_ring_emit = &r300_fence_ring_emit,
        .cs_parse = &r300_cs_parse,
        .copy_blit = &r100_copy_blit,
        .copy_dma = &r300_copy_dma,
        .copy = &r300_copy_dma,
        .set_engine_clock = &radeon_atom_set_engine_clock,
        .set_memory_clock = &radeon_atom_set_memory_clock,
        .set_pcie_lanes = NULL,
        .set_clock_gating = &radeon_atom_set_clock_gating,
        .set_surface_reg = r100_set_surface_reg,
        .clear_surface_reg = r100_clear_surface_reg,
        .bandwidth_update = &rs690_bandwidth_update,
};


/*
 * rv515
 */
int rv515_init(struct radeon_device *rdev);
void rv515_fini(struct radeon_device *rdev);
int rv515_gpu_reset(struct radeon_device *rdev);
uint32_t rv515_mc_rreg(struct radeon_device *rdev, uint32_t reg);
void rv515_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
void rv515_ring_start(struct radeon_device *rdev);
uint32_t rv515_pcie_rreg(struct radeon_device *rdev, uint32_t reg);
void rv515_pcie_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
void rv515_bandwidth_update(struct radeon_device *rdev);
int rv515_resume(struct radeon_device *rdev);
int rv515_suspend(struct radeon_device *rdev);
static struct radeon_asic rv515_asic = {
        .init = &rv515_init,
        .fini = &rv515_fini,
        .suspend = &rv515_suspend,
        .resume = &rv515_resume,
        .errata = NULL,
        .vram_info = NULL,
        .gpu_reset = &rv515_gpu_reset,
        .mc_init = NULL,
        .mc_fini = NULL,
        .wb_init = NULL,
        .wb_fini = NULL,
        .gart_init = &rv370_pcie_gart_init,
        .gart_fini = &rv370_pcie_gart_fini,
        .gart_enable = NULL,
        .gart_disable = NULL,
        .gart_tlb_flush = &rv370_pcie_gart_tlb_flush,
        .gart_set_page = &rv370_pcie_gart_set_page,
        .cp_init = NULL,
        .cp_fini = NULL,
        .cp_disable = NULL,
        .cp_commit = &r100_cp_commit,
        .ring_start = &rv515_ring_start,
        .ring_test = &r100_ring_test,
        .ring_ib_execute = &r100_ring_ib_execute,
        .ib_test = NULL,
        .irq_set = &rs600_irq_set,
        .irq_process = &rs600_irq_process,
        .get_vblank_counter = &rs600_get_vblank_counter,
        .fence_ring_emit = &r300_fence_ring_emit,
        .cs_parse = &r300_cs_parse,
        .copy_blit = &r100_copy_blit,
        .copy_dma = &r300_copy_dma,
        .copy = &r100_copy_blit,
        .set_engine_clock = &radeon_atom_set_engine_clock,
        .set_memory_clock = &radeon_atom_set_memory_clock,
        .set_pcie_lanes = &rv370_set_pcie_lanes,
        .set_clock_gating = &radeon_atom_set_clock_gating,
        .set_surface_reg = r100_set_surface_reg,
        .clear_surface_reg = r100_clear_surface_reg,
        .bandwidth_update = &rv515_bandwidth_update,
};


/*
 * r520,rv530,rv560,rv570,r580
 */
int r520_init(struct radeon_device *rdev);
int r520_resume(struct radeon_device *rdev);
static struct radeon_asic r520_asic = {
        .init = &r520_init,
        .fini = &rv515_fini,
        .suspend = &rv515_suspend,
        .resume = &r520_resume,
        .errata = NULL,
        .vram_info = NULL,
        .gpu_reset = &rv515_gpu_reset,
        .mc_init = NULL,
        .mc_fini = NULL,
        .wb_init = NULL,
        .wb_fini = NULL,
        .gart_init = NULL,
        .gart_fini = NULL,
        .gart_enable = NULL,
        .gart_disable = NULL,
        .gart_tlb_flush = &rv370_pcie_gart_tlb_flush,
        .gart_set_page = &rv370_pcie_gart_set_page,
        .cp_init = NULL,
        .cp_fini = NULL,
        .cp_disable = NULL,
        .cp_commit = &r100_cp_commit,
        .ring_start = &rv515_ring_start,
        .ring_test = &r100_ring_test,
        .ring_ib_execute = &r100_ring_ib_execute,
        .ib_test = NULL,
        .irq_set = &rs600_irq_set,
        .irq_process = &rs600_irq_process,
        .get_vblank_counter = &rs600_get_vblank_counter,
        .fence_ring_emit = &r300_fence_ring_emit,
        .cs_parse = &r300_cs_parse,
        .copy_blit = &r100_copy_blit,
        .copy_dma = &r300_copy_dma,
        .copy = &r100_copy_blit,
        .set_engine_clock = &radeon_atom_set_engine_clock,
        .set_memory_clock = &radeon_atom_set_memory_clock,
        .set_pcie_lanes = &rv370_set_pcie_lanes,
        .set_clock_gating = &radeon_atom_set_clock_gating,
        .set_surface_reg = r100_set_surface_reg,
        .clear_surface_reg = r100_clear_surface_reg,
        .bandwidth_update = &rv515_bandwidth_update,
};

/*
 * r600,rv610,rv630,rv620,rv635,rv670,rs780,rs880
 */
int r600_init(struct radeon_device *rdev);
void r600_fini(struct radeon_device *rdev);
int r600_suspend(struct radeon_device *rdev);
int r600_resume(struct radeon_device *rdev);
int r600_wb_init(struct radeon_device *rdev);
void r600_wb_fini(struct radeon_device *rdev);
void r600_cp_commit(struct radeon_device *rdev);
void r600_pcie_gart_tlb_flush(struct radeon_device *rdev);
uint32_t r600_pciep_rreg(struct radeon_device *rdev, uint32_t reg);
void r600_pciep_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
int r600_cs_parse(struct radeon_cs_parser *p);
void r600_fence_ring_emit(struct radeon_device *rdev,
                          struct radeon_fence *fence);
int r600_copy_dma(struct radeon_device *rdev,
                  uint64_t src_offset,
                  uint64_t dst_offset,
                  unsigned num_pages,
                  struct radeon_fence *fence);
int r600_irq_process(struct radeon_device *rdev);
int r600_irq_set(struct radeon_device *rdev);
int r600_gpu_reset(struct radeon_device *rdev);
int r600_set_surface_reg(struct radeon_device *rdev, int reg,
                         uint32_t tiling_flags, uint32_t pitch,
                         uint32_t offset, uint32_t obj_size);
int r600_clear_surface_reg(struct radeon_device *rdev, int reg);
void r600_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib);
int r600_ib_test(struct radeon_device *rdev);
int r600_ring_test(struct radeon_device *rdev);
int r600_copy_blit(struct radeon_device *rdev,
                   uint64_t src_offset, uint64_t dst_offset,
                   unsigned num_pages, struct radeon_fence *fence);

static struct radeon_asic r600_asic = {
        .errata = NULL,
        .init = &r600_init,
        .fini = &r600_fini,
        .suspend = &r600_suspend,
        .resume = &r600_resume,
        .cp_commit = &r600_cp_commit,
        .vram_info = NULL,
        .gpu_reset = &r600_gpu_reset,
        .mc_init = NULL,
        .mc_fini = NULL,
        .wb_init = &r600_wb_init,
        .wb_fini = &r600_wb_fini,
        .gart_enable = NULL,
        .gart_disable = NULL,
        .gart_tlb_flush = &r600_pcie_gart_tlb_flush,
        .gart_set_page = &rs600_gart_set_page,
        .cp_init = NULL,
        .cp_fini = NULL,
        .cp_disable = NULL,
        .ring_start = NULL,
        .ring_test = &r600_ring_test,
        .ring_ib_execute = &r600_ring_ib_execute,
        .ib_test = &r600_ib_test,
        .irq_set = &r600_irq_set,
        .irq_process = &r600_irq_process,
        .fence_ring_emit = &r600_fence_ring_emit,
        .cs_parse = &r600_cs_parse,
        .copy_blit = &r600_copy_blit,
        .copy_dma = &r600_copy_blit,
        .copy = &r600_copy_blit,
        .set_engine_clock = &radeon_atom_set_engine_clock,
        .set_memory_clock = &radeon_atom_set_memory_clock,
        .set_pcie_lanes = NULL,
        .set_clock_gating = &radeon_atom_set_clock_gating,
        .set_surface_reg = r600_set_surface_reg,
        .clear_surface_reg = r600_clear_surface_reg,
        .bandwidth_update = &rv515_bandwidth_update,
};

/*
 * rv770,rv730,rv710,rv740
 */
int rv770_init(struct radeon_device *rdev);
void rv770_fini(struct radeon_device *rdev);
int rv770_suspend(struct radeon_device *rdev);
int rv770_resume(struct radeon_device *rdev);
int rv770_gpu_reset(struct radeon_device *rdev);

static struct radeon_asic rv770_asic = {
        .errata = NULL,
        .init = &rv770_init,
        .fini = &rv770_fini,
        .suspend = &rv770_suspend,
        .resume = &rv770_resume,
        .cp_commit = &r600_cp_commit,
        .vram_info = NULL,
        .gpu_reset = &rv770_gpu_reset,
        .mc_init = NULL,
        .mc_fini = NULL,
        .wb_init = &r600_wb_init,
        .wb_fini = &r600_wb_fini,
        .gart_enable = NULL,
        .gart_disable = NULL,
        .gart_tlb_flush = &r600_pcie_gart_tlb_flush,
        .gart_set_page = &rs600_gart_set_page,
        .cp_init = NULL,
        .cp_fini = NULL,
        .cp_disable = NULL,
        .ring_start = NULL,
        .ring_test = &r600_ring_test,
        .ring_ib_execute = &r600_ring_ib_execute,
        .ib_test = &r600_ib_test,
        .irq_set = &r600_irq_set,
        .irq_process = &r600_irq_process,
        .fence_ring_emit = &r600_fence_ring_emit,
        .cs_parse = &r600_cs_parse,
        .copy_blit = &r600_copy_blit,
        .copy_dma = &r600_copy_blit,
        .copy = &r600_copy_blit,
        .set_engine_clock = &radeon_atom_set_engine_clock,
        .set_memory_clock = &radeon_atom_set_memory_clock,
        .set_pcie_lanes = NULL,
        .set_clock_gating = &radeon_atom_set_clock_gating,
        .set_surface_reg = r600_set_surface_reg,
        .clear_surface_reg = r600_clear_surface_reg,
        .bandwidth_update = &rv515_bandwidth_update,
};

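/*
 * Binding sketch: one of the tables above is attached to the device at
 * init time by switching on the detected chip family.  A simplified
 * sketch of that selection logic, which lives outside this header:
 *
 *   switch (rdev->family) {
 *   case CHIP_R100:
 *   case CHIP_RV100:
 *           rdev->asic = &r100_asic;
 *           break;
 *   case CHIP_R300:
 *           rdev->asic = &r300_asic;
 *           break;
 *   ...
 *   }
 */
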
#endif